1/* -*- c -*-
2 ----------------------------------------------------------------
3
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
9
10 ----------------------------------------------------------------
11
12 This file is part of Valgrind, a dynamic binary instrumentation
13 framework.
14
15 Copyright (C) 2000-2017 Julian Seward. All rights reserved.
16
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
19 are met:
20
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
23
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
28
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
31
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
34 permission.
35
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
47
48 ----------------------------------------------------------------
49
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
54
55 ----------------------------------------------------------------
56*/
57
58
59/* This file is for inclusion into client (your!) code.
60
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
63
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66 unchanged. When not running on valgrind, each client request
67 consumes very few (eg. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
72
73#ifndef __VALGRIND_H
74#define __VALGRIND_H
75
76
77/* ------------------------------------------------------------------ */
78/* VERSION NUMBER OF VALGRIND */
79/* ------------------------------------------------------------------ */
80
81/* Specify Valgrind's version number, so that user code can
82 conditionally compile based on our version number. Note that these
83 were introduced at version 3.6 and so do not exist in version 3.5
84 or earlier. The recommended way to use them to check for "version
85 X.Y or later" is (eg)
86
87#if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
88 && (__VALGRIND_MAJOR__ > 3 \
89 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
90*/
91#define __VALGRIND_MAJOR__ 3
92#define __VALGRIND_MINOR__ 14
93
94
95#include <stdarg.h>
96
97/* Nb: this file might be included in a file compiled with -ansi. So
98 we can't use C++ style "//" comments nor the "asm" keyword (instead
99 use "__asm__"). */
100
101/* Derive some tags indicating what the target platform is. Note
102 that in this file we're using the compiler's CPP symbols for
103 identifying architectures, which are different to the ones we use
104 within the rest of Valgrind. Note, __powerpc__ is active for both
105 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
106 latter (on Linux, that is).
107
108 Misc note: how to find out what's predefined in gcc by default:
109 gcc -Wp,-dM somefile.c
110*/
111#undef PLAT_x86_darwin
112#undef PLAT_amd64_darwin
113#undef PLAT_x86_win32
114#undef PLAT_amd64_win64
115#undef PLAT_x86_linux
116#undef PLAT_amd64_linux
117#undef PLAT_ppc32_linux
118#undef PLAT_ppc64be_linux
119#undef PLAT_ppc64le_linux
120#undef PLAT_arm_linux
121#undef PLAT_arm64_linux
122#undef PLAT_s390x_linux
123#undef PLAT_mips32_linux
124#undef PLAT_mips64_linux
125#undef PLAT_x86_solaris
126#undef PLAT_amd64_solaris
127
128
129#if defined(__APPLE__) && defined(__i386__)
130# define PLAT_x86_darwin 1
131#elif defined(__APPLE__) && defined(__x86_64__)
132# define PLAT_amd64_darwin 1
133#elif (defined(__MINGW32__) && !defined(__MINGW64__)) \
134 || defined(__CYGWIN32__) \
135 || (defined(_WIN32) && defined(_M_IX86))
136# define PLAT_x86_win32 1
137#elif defined(__MINGW64__) \
138 || (defined(_WIN64) && defined(_M_X64))
139# define PLAT_amd64_win64 1
140#elif defined(__linux__) && defined(__i386__)
141# define PLAT_x86_linux 1
142#elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
143# define PLAT_amd64_linux 1
144#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
145# define PLAT_ppc32_linux 1
146#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
147/* Big Endian uses ELF version 1 */
148# define PLAT_ppc64be_linux 1
149#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
150/* Little Endian uses ELF version 2 */
151# define PLAT_ppc64le_linux 1
152#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
153# define PLAT_arm_linux 1
154#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
155# define PLAT_arm64_linux 1
156#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
157# define PLAT_s390x_linux 1
158#elif defined(__linux__) && defined(__mips__) && (__mips==64)
159# define PLAT_mips64_linux 1
160#elif defined(__linux__) && defined(__mips__) && (__mips!=64)
161# define PLAT_mips32_linux 1
162#elif defined(__sun) && defined(__i386__)
163# define PLAT_x86_solaris 1
164#elif defined(__sun) && defined(__x86_64__)
165# define PLAT_amd64_solaris 1
166#else
167/* If we're not compiling for our target platform, don't generate
168 any inline asms. */
169# if !defined(NVALGRIND)
170# define NVALGRIND 1
171# endif
172#endif
173
174
175/* ------------------------------------------------------------------ */
176/* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
177/* in here of use to end-users -- skip to the next section. */
178/* ------------------------------------------------------------------ */
179
180/*
181 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
182 * request. Accepts both pointers and integers as arguments.
183 *
184 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
185 * client request that does not return a value.
186
187 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
188 * client request and whose value equals the client request result. Accepts
189 * both pointers and integers as arguments. Note that such calls are not
190 * necessarily pure functions -- they may have side effects.
191 */
192
/* Assignment form: performs the client request and stores its result
   (or _zzq_default when not running under Valgrind) into _zzq_rlval.
   Thin wrapper around VALGRIND_DO_CLIENT_REQUEST_EXPR. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

/* Statement form: performs the client request and discards the result.
   A dummy default of 0 is supplied; the caller never sees it. */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                           _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)  \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
205
206#if defined(NVALGRIND)
207
208/* Define NVALGRIND to completely remove the Valgrind magic sequence
209 from the compiled code (analogous to NDEBUG's effects on
210 assert()) */
/* With NVALGRIND defined the whole request collapses to the default
   value.  Note that the request code and the five argument expressions
   are discarded UN-EVALUATED, so they must not carry side effects the
   program relies on. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
      (_zzq_default)
215
216#else /* ! NVALGRIND */
217
218/* The following defines the magic code sequences which the JITter
219 spots and handles magically. Don't look too closely at them as
220 they will rot your brain.
221
222 The assembly code sequences for all architectures is in this one
223 file. This is because this file must be stand-alone, and we don't
224 want to have multiple files.
225
226 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
227 value gets put in the return slot, so that everything works when
228 this is executed not under Valgrind. Args are passed in a memory
229 block, and so there's no intrinsic limit to the number that could
230 be passed, but it's currently five.
231
232 The macro args are:
233 _zzq_rlval result lvalue
234 _zzq_default default value (result returned when running on real CPU)
235 _zzq_request request code
236 _zzq_arg1..5 request params
237
238 The other two macros are used to support function wrapping, and are
239 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
240 guest's NRADDR pseudo-register and whatever other information is
241 needed to safely run the call original from the wrapper: on
242 ppc64-linux, the R2 value at the divert point is also needed. This
243 information is abstracted into a user-visible type, OrigFn.
244
245 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
246 guest, but guarantees that the branch instruction will not be
247 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
248 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
249 complete inline asm, since it needs to be combined with more magic
250 inline asm stuff to be useful.
251*/
252
253/* ----------------- x86-{linux,darwin,solaris} ---------------- */
254
255#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
256 || (defined(PLAT_x86_win32) && defined(__GNUC__)) \
257 || defined(PLAT_x86_solaris)
258
/* On x86 an OrigFn needs only the original function's address. */
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotate-lefts of EDI totalling 3+13+29+19 = 64 bit positions,
   i.e. 0 (mod 32): EDI is left unchanged, so this is a no-op on a real
   CPU, but Valgrind's JIT recognises the sequence as introducing a
   special instruction. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3, %%edi ; roll $13, %%edi\n\t"       \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

/* Request code and five args are packed into a 6-word array whose
   address is passed in EAX; the default goes in EDX (the "0"
   constraint ties it to output %0).  Valgrind decodes the otherwise
   no-op "xchgl %ebx,%ebx" as "EDX = client_request(EAX)", so on a
   real CPU EDX still holds the default on exit. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm text fragment only (no constraints): must be embedded in a
   larger inline asm.  Tells Valgrind to do "call *%EAX" without
   applying any function redirection. */
#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

/* Marker Valgrind recognises as the VEX inject-IR request; a no-op on
   a real CPU. */
#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     "xchgl %%edi,%%edi\n\t"                     \
                     : : : "cc", "memory"                        \
                    );                                           \
 } while (0)
316
317#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__)
318 || PLAT_x86_solaris */
319
320/* ------------------------- x86-Win32 ------------------------- */
321
322#if defined(PLAT_x86_win32) && !defined(__GNUC__)
323
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

/* MSVC inline-asm spelling of the rotate marker; same no-op property
   as the GCC version (3+13+29+19 = 64 == 0 mod 32, EDI unchanged). */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
   __asm rol edi, 3 __asm rol edi, 13                             \
   __asm rol edi, 29 __asm rol edi, 19

/* Forwards to the out-of-line helper below, with all operands widened
   to uintptr_t so both pointers and integers are accepted. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default),    \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1),        \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3),           \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

/* Out-of-line helper: MSVC __asm blocks are statements, not
   expressions, so the magic sequence lives in this function.  Packs
   the request and args into a 6-slot array (address in EAX, default
   in EDX) and returns EDX afterwards — the request's result under
   Valgrind, the default on a real CPU. */
static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
      volatile unsigned int __addr;                               \
      __asm { __SPECIAL_INSTRUCTION_PREAMBLE                      \
              /* %EAX = guest_NRADDR */                           \
              __asm xchg ecx,ecx                                  \
              __asm mov __addr, eax                               \
      }                                                           \
      _zzq_orig->nraddr = __addr;                                 \
    }

/* Deliberately unusable here: expands to the bare token ERROR, so any
   use is a compile-time error. */
#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR()                                  \
    do {                                                          \
        __asm { __SPECIAL_INSTRUCTION_PREAMBLE                    \
                __asm xchg edi,edi                                \
        }                                                         \
    } while (0)
386
387#else
388#error Unsupported compiler.
389#endif
390
391#endif /* PLAT_x86_win32 */
392
393/* ----------------- amd64-{linux,darwin,solaris} --------------- */
394
395#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
396 || defined(PLAT_amd64_solaris) \
397 || (defined(PLAT_amd64_win64) && defined(__GNUC__))
398
/* On amd64 an OrigFn needs only the original function's address. */
typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotate-lefts of RDI totalling 3+13+61+51 = 128 bit positions,
   i.e. 0 (mod 64): RDI is left unchanged, so this is a no-op on a
   real CPU, but Valgrind's JIT recognises the sequence. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3, %%rdi ; rolq $13, %%rdi\n\t"       \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

/* Args block address in RAX, default in RDX (tied to output %0 via
   "0").  Valgrind decodes the no-op "xchgq %rbx,%rbx" as
   "RDX = client_request(RAX)". */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    __extension__                                                 \
    ({ volatile unsigned long int _zzq_args[6];                   \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RDX = client_request ( %RAX ) */         \
                     "xchgq %%rbx,%%rbx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
    })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
      volatile unsigned long int __addr;                          \
      __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE             \
                       /* %RAX = guest_NRADDR */                  \
                       "xchgq %%rcx,%%rcx"                        \
                       : "=a" (__addr)                            \
                       :                                          \
                       : "cc", "memory"                           \
                      );                                          \
      _zzq_orig->nraddr = __addr;                                 \
    }

/* Asm text fragment only: must be combined with further inline asm.
   Tells Valgrind to do "call *%RAX" without redirection. */
#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     "xchgq %%rdi,%%rdi\n\t"                     \
                     : : : "cc", "memory"                        \
                    );                                           \
 } while (0)
456
#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris
          || (PLAT_amd64_win64 && __GNUC__) */
458
459/* ------------------------- amd64-Win64 ------------------------- */
460
461#if defined(PLAT_amd64_win64) && !defined(__GNUC__)
462
463#error Unsupported compiler.
464
465#endif /* PLAT_amd64_win64 */
466
467/* ------------------------ ppc32-linux ------------------------ */
468
469#if defined(PLAT_ppc32_linux)
470
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four full-word rotates of r0 totalling 3+13+29+19 = 64 == 0
   (mod 32): r0 ends up unchanged, so this is a no-op on a real CPU
   but is recognised by Valgrind's JIT. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
                     "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

/* Default goes in r3, args-block pointer in r4; Valgrind decodes the
   no-op "or 1,1,1" as the client-request op and leaves the result in
   r3, copied out via %0. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({ unsigned int _zzq_args[6];                                   \
     unsigned int _zzq_result;                                    \
     unsigned int* _zzq_ptr;                                      \
     _zzq_args[0] = (unsigned int)(_zzq_request);                 \
     _zzq_args[1] = (unsigned int)(_zzq_arg1);                    \
     _zzq_args[2] = (unsigned int)(_zzq_arg2);                    \
     _zzq_args[3] = (unsigned int)(_zzq_arg3);                    \
     _zzq_args[4] = (unsigned int)(_zzq_arg4);                    \
     _zzq_args[5] = (unsigned int)(_zzq_arg5);                    \
     _zzq_ptr = _zzq_args;                                        \
     __asm__ volatile("mr 3,%1\n\t" /*default*/                   \
                      "mr 4,%2\n\t" /*ptr*/                       \
                      __SPECIAL_INSTRUCTION_PREAMBLE              \
                      /* %R3 = client_request ( %R4 ) */          \
                      "or 1,1,1\n\t"                              \
                      "mr %0,3" /*result*/                        \
                      : "=b" (_zzq_result)                        \
                      : "b" (_zzq_default), "b" (_zzq_ptr)        \
                      : "cc", "memory", "r3", "r4");              \
     _zzq_result;                                                 \
   })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm text fragment only: tells Valgrind to branch-and-link through
   r11 without redirection. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)
533
534#endif /* PLAT_ppc32_linux */
535
536/* ------------------------ ppc64-linux ------------------------ */
537
538#if defined(PLAT_ppc64be_linux)
539
/* On ppc64 (big-endian ELFv1 ABI) a wrapper also needs the callee's
   TOC pointer (r2) to call the original safely. */
typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

/* Four doubleword rotates of r0 totalling 3+13+61+51 = 128 == 0
   (mod 64): r0 is unchanged, so a no-op on a real CPU, but recognised
   by Valgrind's JIT. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

/* Default in r3, args-block pointer in r4; Valgrind decodes the no-op
   "or 1,1,1" as the client-request op and leaves the result in r3. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({ unsigned long int _zzq_args[6];                              \
     unsigned long int _zzq_result;                               \
     unsigned long int* _zzq_ptr;                                 \
     _zzq_args[0] = (unsigned long int)(_zzq_request);            \
     _zzq_args[1] = (unsigned long int)(_zzq_arg1);               \
     _zzq_args[2] = (unsigned long int)(_zzq_arg2);               \
     _zzq_args[3] = (unsigned long int)(_zzq_arg3);               \
     _zzq_args[4] = (unsigned long int)(_zzq_arg4);               \
     _zzq_args[5] = (unsigned long int)(_zzq_arg5);               \
     _zzq_ptr = _zzq_args;                                        \
     __asm__ volatile("mr 3,%1\n\t" /*default*/                   \
                      "mr 4,%2\n\t" /*ptr*/                       \
                      __SPECIAL_INSTRUCTION_PREAMBLE              \
                      /* %R3 = client_request ( %R4 ) */          \
                      "or 1,1,1\n\t"                              \
                      "mr %0,3" /*result*/                        \
                      : "=b" (_zzq_result)                        \
                      : "b" (_zzq_default), "b" (_zzq_ptr)        \
                      : "cc", "memory", "r3", "r4");              \
     _zzq_result;                                                 \
   })

/* Two requests: guest NRADDR, then the R2 (TOC) value at the divert
   point — both needed to call the original from a wrapper. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

/* Asm text fragment only: branch-and-link through r11, unredirected. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)
612
613#endif /* PLAT_ppc64be_linux */
614
615#if defined(PLAT_ppc64le_linux)
616
/* ppc64 little-endian (ELFv2): same scheme as ppc64be, except the
   no-redirect branch goes through r12 rather than r11. */
typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

/* Four doubleword rotates of r0 totalling 3+13+61+51 = 128 == 0
   (mod 64): a no-op on a real CPU, recognised by Valgrind's JIT. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

/* Default in r3, args-block pointer in r4; result comes back in r3. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({ unsigned long int _zzq_args[6];                              \
     unsigned long int _zzq_result;                               \
     unsigned long int* _zzq_ptr;                                 \
     _zzq_args[0] = (unsigned long int)(_zzq_request);            \
     _zzq_args[1] = (unsigned long int)(_zzq_arg1);               \
     _zzq_args[2] = (unsigned long int)(_zzq_arg2);               \
     _zzq_args[3] = (unsigned long int)(_zzq_arg3);               \
     _zzq_args[4] = (unsigned long int)(_zzq_arg4);               \
     _zzq_args[5] = (unsigned long int)(_zzq_arg5);               \
     _zzq_ptr = _zzq_args;                                        \
     __asm__ volatile("mr 3,%1\n\t" /*default*/                   \
                      "mr 4,%2\n\t" /*ptr*/                       \
                      __SPECIAL_INSTRUCTION_PREAMBLE              \
                      /* %R3 = client_request ( %R4 ) */          \
                      "or 1,1,1\n\t"                              \
                      "mr %0,3" /*result*/                        \
                      : "=b" (_zzq_result)                        \
                      : "b" (_zzq_default), "b" (_zzq_ptr)        \
                      : "cc", "memory", "r3", "r4");              \
     _zzq_result;                                                 \
   })

/* Two requests: guest NRADDR, then the R2 (TOC) value at the divert
   point. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

/* Asm text fragment only: branch-and-link through r12, unredirected. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R12 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)
689
690#endif /* PLAT_ppc64le_linux */
691
692/* ------------------------- arm-linux ------------------------- */
693
694#if defined(PLAT_arm_linux)
695
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of r12 totalling 3+13+29+19 = 64 == 0 (mod 32):
   r12 is unchanged, a no-op on real hardware, but recognised by
   Valgrind's JIT as the special-instruction marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
            "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

/* Default in r3, args-block pointer in r4; Valgrind decodes the no-op
   "orr r10,r10,r10" as the client-request op, leaving the result in
   r3. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("mov r3, %1\n\t" /*default*/                 \
                     "mov r4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = client_request ( R4 ) */             \
                     "orr r10, r10, r10\n\t"                      \
                     "mov %0, r3" /*result*/                      \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "r3", "r4");                \
    _zzq_result;                                                  \
  })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = guest_NRADDR */                      \
                     "orr r11, r11, r11\n\t"                      \
                     "mov %0, r3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm text fragment only: branch-and-link through r4, unredirected. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     "orr r9, r9, r9\n\t"                        \
                     : : : "cc", "memory"                        \
                    );                                           \
 } while (0)
757
758#endif /* PLAT_arm_linux */
759
760/* ------------------------ arm64-linux ------------------------- */
761
762#if defined(PLAT_arm64_linux)
763
typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of x12 totalling 3+13+51+61 = 128 == 0 (mod 64):
   x12 is unchanged, a no-op on real hardware, but recognised by
   Valgrind's JIT as the special-instruction marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
            "ror x12, x12, #3 ; ror x12, x12, #13 \n\t" \
            "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"

/* Default in x3, args-block pointer in x4; Valgrind decodes the no-op
   "orr x10,x10,x10" as the client-request op, leaving the result in
   x3. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned long int _zzq_args[6];                      \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile("mov x3, %1\n\t" /*default*/                 \
                     "mov x4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = client_request ( X4 ) */             \
                     "orr x10, x10, x10\n\t"                      \
                     "mov %0, x3" /*result*/                      \
                     : "=r" (_zzq_result)                         \
                     : "r" ((unsigned long int)(_zzq_default)),   \
                       "r" (&_zzq_args[0])                        \
                     : "cc","memory", "x3", "x4");                \
    _zzq_result;                                                  \
  })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = guest_NRADDR */                      \
                     "orr x11, x11, x11\n\t"                      \
                     "mov %0, x3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "x3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm text fragment only: branch-and-link through x8, unredirected. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir X8 */          \
                     "orr x12, x12, x12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     "orr x9, x9, x9\n\t"                        \
                     : : : "cc", "memory"                        \
                    );                                           \
 } while (0)
826
827#endif /* PLAT_arm64_linux */
828
829/* ------------------------ s390x-linux ------------------------ */
830
831#if defined(PLAT_s390x_linux)
832
833typedef
834 struct {
835 unsigned long int nraddr; /* where's the code? */
836 }
837 OrigFn;
838
839/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
840 * code. This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_toIR.c).
842 */
/* Each "lr Rx,Rx" copies a register to itself — a no-op on real
   hardware.  The four-instruction run is the marker Valgrind's JIT
   looks for; the single "lr" that follows (one of the *_CODE defines
   below) selects which request is being made. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                           \
                     "lr 15,15\n\t"                              \
                     "lr 1,1\n\t"                                \
                     "lr 2,2\n\t"                                \
                     "lr 3,3\n\t"

#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE "lr 5,5\n\t"

/* Args-block address in r2, default in r3; Valgrind replaces r3 with
   the request's result, copied out via %0. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                         \
       _zzq_default, _zzq_request,                               \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                  \
 ({volatile unsigned long int _zzq_args[6];                      \
   volatile unsigned long int _zzq_result;                       \
   _zzq_args[0] = (unsigned long int)(_zzq_request);             \
   _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
   _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
   _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
   _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
   _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
   __asm__ volatile(/* r2 = args */                              \
                    "lgr 2,%1\n\t"                               \
                    /* r3 = default */                           \
                    "lgr 3,%2\n\t"                               \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CLIENT_REQUEST_CODE                        \
                    /* results = r3 */                           \
                    "lgr %0, 3\n\t"                              \
                    : "=d" (_zzq_result)                         \
                    : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                    : "cc", "2", "3", "memory"                   \
                   );                                            \
   _zzq_result;                                                  \
 })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                      \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
   volatile unsigned long int __addr;                            \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                    __GET_NR_CONTEXT_CODE                        \
                    "lgr %0, 3\n\t"                              \
                    : "=a" (__addr)                              \
                    :                                            \
                    : "cc", "3", "memory"                        \
                   );                                            \
   _zzq_orig->nraddr = __addr;                                   \
 }

/* Asm text fragment only: call through r1 without redirection. */
#define VALGRIND_CALL_NOREDIR_R1                                 \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     __VEX_INJECT_IR_CODE);                      \
 } while (0)
903
904#endif /* PLAT_s390x_linux */
905
906/* ------------------------- mips32-linux ---------------- */
907
908#if defined(PLAT_mips32_linux)
909
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* .word 0x342
 * .word 0x742
 * .word 0xC2
 * .word 0x4C2*/
/* Shifts of the hard-wired zero register $0: architecturally no-ops,
   but recognised by Valgrind's JIT as the special-instruction
   marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE          \
                     "srl $0, $0, 13\n\t"       \
                     "srl $0, $0, 29\n\t"       \
                     "srl $0, $0, 3\n\t"        \
                     "srl $0, $0, 19\n\t"

/* Default in $11 (t3), args-block pointer in $12 (t4); Valgrind
   decodes "or $13,$13,$13" as the client-request op and leaves the
   result in $11. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                        \
       _zzq_default, _zzq_request,                              \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)   \
  __extension__                                                 \
  ({ volatile unsigned int _zzq_args[6];                        \
    volatile unsigned int _zzq_result;                          \
    _zzq_args[0] = (unsigned int)(_zzq_request);                \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                   \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                   \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                   \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                   \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                   \
    __asm__ volatile("move $11, %1\n\t" /*default*/             \
                     "move $12, %2\n\t" /*ptr*/                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE             \
                     /* T3 = client_request ( T4 ) */           \
                     "or $13, $13, $13\n\t"                     \
                     "move %0, $11\n\t" /*result*/              \
                     : "=r" (_zzq_result)                       \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])  \
                     : "$11", "$12", "memory");                 \
    _zzq_result;                                                \
  })

/* Fetch the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                     \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
    volatile unsigned int __addr;                               \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE             \
                     /* %t9 = guest_NRADDR */                   \
                     "or $14, $14, $14\n\t"                     \
                     "move %0, $11" /*result*/                  \
                     : "=r" (__addr)                            \
                     :                                          \
                     : "$11"                                    \
                     );                                         \
    _zzq_orig->nraddr = __addr;                                 \
  }

/* Asm text fragment only: call through $t9 without redirection. */
#define VALGRIND_CALL_NOREDIR_T9                                \
                     __SPECIAL_INSTRUCTION_PREAMBLE             \
                     /* call-noredir *%t9 */                    \
                     "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR()                                \
 do {                                                           \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE             \
                     "or $11, $11, $11\n\t"                     \
                    );                                          \
 } while (0)
975
976
977#endif /* PLAT_mips32_linux */
978
979/* ------------------------- mips64-linux ---------------- */
980
981#if defined(PLAT_mips64_linux)
982
/* Context captured by VALGRIND_GET_NR_CONTEXT: the non-redirected
   address of the original (wrapped) function. */
typedef struct {
   unsigned long nraddr; /* where's the code? */
} OrigFn;
988
/* The "special instruction preamble": four doubleword shifts of the
   always-zero register $0.  No-ops on real hardware, but recognised
   as a magic marker by Valgrind's translator:
     dsll $0,$0, 3
     dsll $0,$0, 13
     dsll $0,$0, 29
     dsll $0,$0, 19 */
#define __SPECIAL_INSTRUCTION_PREAMBLE                              \
        "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"                         \
        "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
996
/* Emit a client request (64-bit variant).  The request code and five
   arguments are stored in _zzq_args[]; $11 is pre-loaded with
   _zzq_default and $12 with the address of that block, then the magic
   "or $13, $13, $13" performs the request and leaves the result in
   $11.  Natively the marker is a no-op, so the default is returned
   unchanged. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned long int _zzq_args[6];                     \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
        __asm__ volatile("move $11, %1\n\t" /*default*/           \
                         "move $12, %2\n\t" /*ptr*/               \
                         __SPECIAL_INSTRUCTION_PREAMBLE           \
                         /* $11 = client_request ( $12 ) */       \
                         "or $13, $13, $13\n\t"                   \
                         "move %0, $11\n\t" /*result*/            \
                         : "=r" (_zzq_result)                     \
                         : "r" (_zzq_default), "r" (&_zzq_args[0])\
                         : "$11", "$12", "memory");               \
    _zzq_result;                                                  \
  })
1020
/* Fetch the non-redirected address of the original function into
   _zzq_rlval.nraddr.  The magic "or $14, $14, $14" marker asks
   Valgrind for guest_NRADDR; the value arrives in $11. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $11 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11");                                    \
    _zzq_orig->nraddr = __addr;                                   \
  }
1033
/* Magic no-redirect-call marker ("or $15, $15, $15" after the
   preamble).  Bare asm text with no operands; to be embedded inside a
   larger asm statement with the target address already in $25. */
#define VALGRIND_CALL_NOREDIR_T9                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE              \
                     /* call-noredir $25 */                      \
                     "or $15, $15, $15\n\t"
1038
/* Ask Valgrind's translator to inject IR at this point; the marker is
   "or $11, $11, $11" after the preamble.  A no-op when not running
   under Valgrind. */
#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     "or $11, $11, $11\n\t"                      \
                    );                                           \
 } while (0)
1045
1046#endif /* PLAT_mips64_linux */
1047
1048/* Insert assembly code for other platforms here... */
1049
1050#endif /* NVALGRIND */
1051
1052
1053/* ------------------------------------------------------------------ */
1054/* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
1055/* ugly. It's the least-worst tradeoff I can think of. */
1056/* ------------------------------------------------------------------ */
1057
1058/* This section defines magic (a.k.a appalling-hack) macros for doing
1059 guaranteed-no-redirection macros, so as to get from function
1060 wrappers to the functions they are wrapping. The whole point is to
1061 construct standard call sequences, but to do the call itself with a
1062 special no-redirect call pseudo-instruction that the JIT
1063 understands and handles specially. This section is long and
1064 repetitious, and I can't see a way to make it shorter.
1065
1066 The naming scheme is as follows:
1067
1068 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
1069
1070 'W' stands for "word" and 'v' for "void". Hence there are
1071 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
1072 and for each, the possibility of returning a word-typed result, or
1073 no result.
1074*/
1075
1076/* Use these to write the name of your wrapper. NOTE: duplicates
1077 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
   the default behaviour equivalence class tag "0000" into the name.
1079 See pub_tool_redir.h for details -- normally you don't need to
1080 think about this, though. */
1081
1082/* Use an extra level of macroisation so as to ensure the soname/fnname
1083 args are fully macro-expanded before pasting them together. */
/* Paste four tokens together; each argument is fully macro-expanded
   by the callers below before the paste happens. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

/* Build the name of a wrapper for function 'fnname' in an object
   whose soname matches 'soname'.  The ZU/ZZ suffix selects how the
   two components are Z-encoded -- see pub_tool_redir.h. */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
1091
/* Use this macro from within a wrapper function to collect the
   context (address and possibly other info) of the original function.
   Once you have that you can then use it in one of the CALL_FN_
   macros.  The type of the argument _lval is OrigFn.  This simply
   forwards to the platform-specific VALGRIND_GET_NR_CONTEXT defined
   earlier in this file. */
#define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
1097
1098/* Also provide end-user facilities for function replacement, rather
1099 than wrapping. A replacement function differs from a wrapper in
1100 that it has no way to get hold of the original function being
1101 called, and hence no way to call onwards to it. In a replacement
1102 function, VALGRIND_GET_ORIG_FN always returns zero. */
1103
/* Same naming scheme as I_WRAP_SONAME_FNNAME_Z{U,Z} above, but with
   the _vgr ("replace") prefix instead of _vgw ("wrap"). */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1109
1110/* Derivatives of the main macros below, for calling functions
1111 returning void. */
1112
/* Each CALL_FN_v_* delegates to the corresponding CALL_FN_W_* and
   discards the word result via a volatile temporary, so the call
   itself cannot be optimised away. */
#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_v(_junk,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_W(_junk,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1144
1145/* ----------------- x86-{linux,darwin,solaris} ---------------- */
1146
1147#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
1148 || defined(PLAT_x86_solaris)
1149
/* These regs are trashed by the hidden call. No need to mention eax
   as gcc can already see that, plus causes gcc to bomb. */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* The saved stack pointer is kept in %edi for the duration of the
   call, which is why "edi" appears in every clobber list below. */
#define VALGRIND_ALIGN_STACK \
   "movl %%esp,%%edi\n\t" \
   "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK \
   "movl %%edi,%%esp\n\t"
1164
1165/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
1166 long) == 4. */
1167
/* Call a 0-argument function with no redirection.  _argvec[0] holds
   the target address; %eax carries &_argvec[0] in and the word result
   out ("=a").  Each N-argument variant below pads %esp with "subl"
   so that the pushes leave the stack 16-byte aligned at the call. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1185
/* 1 arg: subl $12 + one pushl = 16 bytes, keeping %esp 16-aligned. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 2 args: subl $8 + two pushes = 16 bytes. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 3 args: subl $4 + three pushes = 16 bytes. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1254
/* 4 args: four pushes = 16 bytes exactly, no padding needed. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 5 args: subl $12 + five pushes = 32 bytes. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 6 args: subl $8 + six pushes = 32 bytes. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1340
/* 7 args: subl $4 + seven pushes = 32 bytes. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 8 args: eight pushes = 32 bytes exactly, no padding. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 9 args: subl $12 + nine pushes = 48 bytes. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1447
/* 10 args: subl $8 + ten pushes = 48 bytes. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 11 args: subl $4 + eleven pushes = 48 bytes. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 12 args: twelve pushes = 48 bytes exactly, no padding. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11,arg12)                    \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 48(%%eax)\n\t"                                    \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1574
1575#endif /* PLAT_x86_linux || PLAT_x86_darwin || PLAT_x86_solaris */
1576
1577/* ---------------- amd64-{linux,darwin,solaris} --------------- */
1578
1579#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
1580 || defined(PLAT_amd64_solaris)
1581
1582/* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1583
/* These regs are trashed by the hidden call.  %rax is deliberately
   omitted: it is the asm output operand ("=a") in the CALL_FN_
   macros below. */
#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi",       \
                            "rdi", "r8", "r9", "r10", "r11"
1587
1588/* This is all pretty complex. It's so as to make stack unwinding
1589 work reliably. See bug 243270. The basic problem is the sub and
1590 add of 128 of %rsp in all of the following macros. If gcc believes
1591 the CFA is in %rsp, then unwinding may fail, because what's at the
1592 CFA is not what gcc "expected" when it constructs the CFIs for the
1593 places where the macros are instantiated.
1594
1595 But we can't just add a CFI annotation to increase the CFA offset
1596 by 128, to match the sub of 128 from %rsp, because we don't know
1597 whether gcc has chosen %rsp as the CFA at that point, or whether it
1598 has chosen some other register (eg, %rbp). In the latter case,
1599 adding a CFI annotation to change the CFA offset is simply wrong.
1600
1601 So the solution is to get hold of the CFA using
1602 __builtin_dwarf_cfa(), put it in a known register, and add a
1603 CFI annotation to say what the register is. We choose %rbp for
1604 this (perhaps perversely), because:
1605
1606 (1) %rbp is already subject to unwinding. If a new register was
1607 chosen then the unwinder would have to unwind it in all stack
1608 traces, which is expensive, and
1609
1610 (2) %rbp is already subject to precise exception updates in the
1611 JIT. If a new register was chosen, we'd have to have precise
1612 exceptions for it too, which reduces performance of the
1613 generated code.
1614
1615 However .. one extra complication. We can't just whack the result
1616 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1617 list of trashed registers at the end of the inline assembly
1618 fragments; gcc won't allow %rbp to appear in that list. Hence
1619 instead we need to stash %rbp in %r15 for the duration of the asm,
1620 and say that %r15 is trashed instead. gcc seems happy to go with
1621 that.
1622
1623 Oh .. and this all needs to be conditionalised so that it is
1624 unchanged from before this commit, when compiled with older gccs
1625 that don't support __builtin_dwarf_cfa. Furthermore, since
1626 this header file is freestanding, it has to be independent of
1627 config.h, and so the following conditionalisation cannot depend on
1628 configure time checks.
1629
1630 Although it's not clear from
1631 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1632 this expression excludes Darwin.
1633 .cfi directives in Darwin assembly appear to be completely
1634 different and I haven't investigated how they work.
1635
1636 For even more entertainment value, note we have to use the
1637 completely undocumented __builtin_dwarf_cfa(), which appears to
1638 really compute the CFA, whereas __builtin_frame_address(0) claims
1639 to but actually doesn't. See
1640 https://bugs.kde.org/show_bug.cgi?id=243270#c47
1641*/
/* When the toolchain emits DWARF CFI: copy the CFA (obtained via
   __builtin_dwarf_cfa(), passed in as asm operand %2 through
   __FRAME_POINTER) into %rbp and annotate that fact with .cfi
   directives, so unwinding through the hidden call still works.
   The caller's %rbp is stashed in %r15 because gcc refuses %rbp in a
   clobber list -- hence "r15" in the clobbers of the CALL_FN_ macros
   below.  See the long comment above for the full story. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif
1658
1659/* Macros to save and align the stack before making a function
1660 call and restore it afterwards as gcc may not keep the stack
1661 pointer aligned if it doesn't realise calls are being made
1662 to other functions. */
1663
/* The saved stack pointer is kept in %r14 for the duration of the
   call, which is why "r14" appears in every clobber list below. */
#define VALGRIND_ALIGN_STACK \
   "movq %%rsp,%%r14\n\t" \
   "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK \
   "movq %%r14,%%rsp\n\t"
1669
1670/* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1671 long) == 8. */
1672
1673/* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1674 macros. In order not to trash the stack redzone, we need to drop
1675 %rsp by 128 before the hidden call, and restore afterwards. The
   nastiness is that it is only by luck that the stack still appears
1677 to be unwindable during the hidden call - since then the behaviour
1678 of any routine using this macro does not match what the CFI data
1679 says. Sigh.
1680
1681 Why is this important? Imagine that a wrapper has a stack
1682 allocated local, and passes to the hidden call, a pointer to it.
1683 Because gcc does not know about the hidden call, it may allocate
1684 that local in the redzone. Unfortunately the hidden call may then
1685 trash it before it comes to use it. So we must step clear of the
1686 redzone, for the duration of the hidden call, to make it safe.
1687
1688 Probably the same problem afflicts the other redzone-style ABIs too
1689 (ppc64-linux); but for those, the stack is
1690 self describing (none of this CFI nonsense) so at least messing
1691 with the stack pointer doesn't give a danger of non-unwindable
1692 stack. */
1693
/* Call a 0-argument function with no redirection.  _argvec[0] holds
   the target address; %rax carries &_argvec[0] in and the word result
   out ("=a").  "subq $128" steps %rsp below the ABI red zone for the
   duration of the hidden call so redzone-allocated locals are not
   trashed (see the long comment above). */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1714
/* 1 arg -> %rdi (System V AMD64 argument register order). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 2 args -> %rdi, %rsi. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 3 args -> %rdi, %rsi, %rdx. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1789
/* 4 args -> %rdi, %rsi, %rdx, %rcx. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 5 args -> %rdi, %rsi, %rdx, %rcx, %r8. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 6 args -> %rdi, %rsi, %rdx, %rcx, %r8, %r9: all in registers,
   nothing on the stack. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1882
1883#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1884 arg7) \
1885 do { \
1886 volatile OrigFn _orig = (orig); \
1887 volatile unsigned long _argvec[8]; \
1888 volatile unsigned long _res; \
1889 _argvec[0] = (unsigned long)_orig.nraddr; \
1890 _argvec[1] = (unsigned long)(arg1); \
1891 _argvec[2] = (unsigned long)(arg2); \
1892 _argvec[3] = (unsigned long)(arg3); \
1893 _argvec[4] = (unsigned long)(arg4); \
1894 _argvec[5] = (unsigned long)(arg5); \
1895 _argvec[6] = (unsigned long)(arg6); \
1896 _argvec[7] = (unsigned long)(arg7); \
1897 __asm__ volatile( \
1898 VALGRIND_CFI_PROLOGUE \
1899 VALGRIND_ALIGN_STACK \
1900 "subq $136,%%rsp\n\t" \
1901 "pushq 56(%%rax)\n\t" \
1902 "movq 48(%%rax), %%r9\n\t" \
1903 "movq 40(%%rax), %%r8\n\t" \
1904 "movq 32(%%rax), %%rcx\n\t" \
1905 "movq 24(%%rax), %%rdx\n\t" \
1906 "movq 16(%%rax), %%rsi\n\t" \
1907 "movq 8(%%rax), %%rdi\n\t" \
1908 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1909 VALGRIND_CALL_NOREDIR_RAX \
1910 VALGRIND_RESTORE_STACK \
1911 VALGRIND_CFI_EPILOGUE \
1912 : /*out*/ "=a" (_res) \
1913 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1914 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1915 ); \
1916 lval = (__typeof__(lval)) _res; \
1917 } while (0)
1918
1919#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1920 arg7,arg8) \
1921 do { \
1922 volatile OrigFn _orig = (orig); \
1923 volatile unsigned long _argvec[9]; \
1924 volatile unsigned long _res; \
1925 _argvec[0] = (unsigned long)_orig.nraddr; \
1926 _argvec[1] = (unsigned long)(arg1); \
1927 _argvec[2] = (unsigned long)(arg2); \
1928 _argvec[3] = (unsigned long)(arg3); \
1929 _argvec[4] = (unsigned long)(arg4); \
1930 _argvec[5] = (unsigned long)(arg5); \
1931 _argvec[6] = (unsigned long)(arg6); \
1932 _argvec[7] = (unsigned long)(arg7); \
1933 _argvec[8] = (unsigned long)(arg8); \
1934 __asm__ volatile( \
1935 VALGRIND_CFI_PROLOGUE \
1936 VALGRIND_ALIGN_STACK \
1937 "subq $128,%%rsp\n\t" \
1938 "pushq 64(%%rax)\n\t" \
1939 "pushq 56(%%rax)\n\t" \
1940 "movq 48(%%rax), %%r9\n\t" \
1941 "movq 40(%%rax), %%r8\n\t" \
1942 "movq 32(%%rax), %%rcx\n\t" \
1943 "movq 24(%%rax), %%rdx\n\t" \
1944 "movq 16(%%rax), %%rsi\n\t" \
1945 "movq 8(%%rax), %%rdi\n\t" \
1946 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1947 VALGRIND_CALL_NOREDIR_RAX \
1948 VALGRIND_RESTORE_STACK \
1949 VALGRIND_CFI_EPILOGUE \
1950 : /*out*/ "=a" (_res) \
1951 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1952 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1953 ); \
1954 lval = (__typeof__(lval)) _res; \
1955 } while (0)
1956
1957#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1958 arg7,arg8,arg9) \
1959 do { \
1960 volatile OrigFn _orig = (orig); \
1961 volatile unsigned long _argvec[10]; \
1962 volatile unsigned long _res; \
1963 _argvec[0] = (unsigned long)_orig.nraddr; \
1964 _argvec[1] = (unsigned long)(arg1); \
1965 _argvec[2] = (unsigned long)(arg2); \
1966 _argvec[3] = (unsigned long)(arg3); \
1967 _argvec[4] = (unsigned long)(arg4); \
1968 _argvec[5] = (unsigned long)(arg5); \
1969 _argvec[6] = (unsigned long)(arg6); \
1970 _argvec[7] = (unsigned long)(arg7); \
1971 _argvec[8] = (unsigned long)(arg8); \
1972 _argvec[9] = (unsigned long)(arg9); \
1973 __asm__ volatile( \
1974 VALGRIND_CFI_PROLOGUE \
1975 VALGRIND_ALIGN_STACK \
1976 "subq $136,%%rsp\n\t" \
1977 "pushq 72(%%rax)\n\t" \
1978 "pushq 64(%%rax)\n\t" \
1979 "pushq 56(%%rax)\n\t" \
1980 "movq 48(%%rax), %%r9\n\t" \
1981 "movq 40(%%rax), %%r8\n\t" \
1982 "movq 32(%%rax), %%rcx\n\t" \
1983 "movq 24(%%rax), %%rdx\n\t" \
1984 "movq 16(%%rax), %%rsi\n\t" \
1985 "movq 8(%%rax), %%rdi\n\t" \
1986 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1987 VALGRIND_CALL_NOREDIR_RAX \
1988 VALGRIND_RESTORE_STACK \
1989 VALGRIND_CFI_EPILOGUE \
1990 : /*out*/ "=a" (_res) \
1991 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1992 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1993 ); \
1994 lval = (__typeof__(lval)) _res; \
1995 } while (0)
1996
1997#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1998 arg7,arg8,arg9,arg10) \
1999 do { \
2000 volatile OrigFn _orig = (orig); \
2001 volatile unsigned long _argvec[11]; \
2002 volatile unsigned long _res; \
2003 _argvec[0] = (unsigned long)_orig.nraddr; \
2004 _argvec[1] = (unsigned long)(arg1); \
2005 _argvec[2] = (unsigned long)(arg2); \
2006 _argvec[3] = (unsigned long)(arg3); \
2007 _argvec[4] = (unsigned long)(arg4); \
2008 _argvec[5] = (unsigned long)(arg5); \
2009 _argvec[6] = (unsigned long)(arg6); \
2010 _argvec[7] = (unsigned long)(arg7); \
2011 _argvec[8] = (unsigned long)(arg8); \
2012 _argvec[9] = (unsigned long)(arg9); \
2013 _argvec[10] = (unsigned long)(arg10); \
2014 __asm__ volatile( \
2015 VALGRIND_CFI_PROLOGUE \
2016 VALGRIND_ALIGN_STACK \
2017 "subq $128,%%rsp\n\t" \
2018 "pushq 80(%%rax)\n\t" \
2019 "pushq 72(%%rax)\n\t" \
2020 "pushq 64(%%rax)\n\t" \
2021 "pushq 56(%%rax)\n\t" \
2022 "movq 48(%%rax), %%r9\n\t" \
2023 "movq 40(%%rax), %%r8\n\t" \
2024 "movq 32(%%rax), %%rcx\n\t" \
2025 "movq 24(%%rax), %%rdx\n\t" \
2026 "movq 16(%%rax), %%rsi\n\t" \
2027 "movq 8(%%rax), %%rdi\n\t" \
2028 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2029 VALGRIND_CALL_NOREDIR_RAX \
2030 VALGRIND_RESTORE_STACK \
2031 VALGRIND_CFI_EPILOGUE \
2032 : /*out*/ "=a" (_res) \
2033 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2034 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2035 ); \
2036 lval = (__typeof__(lval)) _res; \
2037 } while (0)
2038
2039#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2040 arg7,arg8,arg9,arg10,arg11) \
2041 do { \
2042 volatile OrigFn _orig = (orig); \
2043 volatile unsigned long _argvec[12]; \
2044 volatile unsigned long _res; \
2045 _argvec[0] = (unsigned long)_orig.nraddr; \
2046 _argvec[1] = (unsigned long)(arg1); \
2047 _argvec[2] = (unsigned long)(arg2); \
2048 _argvec[3] = (unsigned long)(arg3); \
2049 _argvec[4] = (unsigned long)(arg4); \
2050 _argvec[5] = (unsigned long)(arg5); \
2051 _argvec[6] = (unsigned long)(arg6); \
2052 _argvec[7] = (unsigned long)(arg7); \
2053 _argvec[8] = (unsigned long)(arg8); \
2054 _argvec[9] = (unsigned long)(arg9); \
2055 _argvec[10] = (unsigned long)(arg10); \
2056 _argvec[11] = (unsigned long)(arg11); \
2057 __asm__ volatile( \
2058 VALGRIND_CFI_PROLOGUE \
2059 VALGRIND_ALIGN_STACK \
2060 "subq $136,%%rsp\n\t" \
2061 "pushq 88(%%rax)\n\t" \
2062 "pushq 80(%%rax)\n\t" \
2063 "pushq 72(%%rax)\n\t" \
2064 "pushq 64(%%rax)\n\t" \
2065 "pushq 56(%%rax)\n\t" \
2066 "movq 48(%%rax), %%r9\n\t" \
2067 "movq 40(%%rax), %%r8\n\t" \
2068 "movq 32(%%rax), %%rcx\n\t" \
2069 "movq 24(%%rax), %%rdx\n\t" \
2070 "movq 16(%%rax), %%rsi\n\t" \
2071 "movq 8(%%rax), %%rdi\n\t" \
2072 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2073 VALGRIND_CALL_NOREDIR_RAX \
2074 VALGRIND_RESTORE_STACK \
2075 VALGRIND_CFI_EPILOGUE \
2076 : /*out*/ "=a" (_res) \
2077 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2078 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2079 ); \
2080 lval = (__typeof__(lval)) _res; \
2081 } while (0)
2082
2083#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2084 arg7,arg8,arg9,arg10,arg11,arg12) \
2085 do { \
2086 volatile OrigFn _orig = (orig); \
2087 volatile unsigned long _argvec[13]; \
2088 volatile unsigned long _res; \
2089 _argvec[0] = (unsigned long)_orig.nraddr; \
2090 _argvec[1] = (unsigned long)(arg1); \
2091 _argvec[2] = (unsigned long)(arg2); \
2092 _argvec[3] = (unsigned long)(arg3); \
2093 _argvec[4] = (unsigned long)(arg4); \
2094 _argvec[5] = (unsigned long)(arg5); \
2095 _argvec[6] = (unsigned long)(arg6); \
2096 _argvec[7] = (unsigned long)(arg7); \
2097 _argvec[8] = (unsigned long)(arg8); \
2098 _argvec[9] = (unsigned long)(arg9); \
2099 _argvec[10] = (unsigned long)(arg10); \
2100 _argvec[11] = (unsigned long)(arg11); \
2101 _argvec[12] = (unsigned long)(arg12); \
2102 __asm__ volatile( \
2103 VALGRIND_CFI_PROLOGUE \
2104 VALGRIND_ALIGN_STACK \
2105 "subq $128,%%rsp\n\t" \
2106 "pushq 96(%%rax)\n\t" \
2107 "pushq 88(%%rax)\n\t" \
2108 "pushq 80(%%rax)\n\t" \
2109 "pushq 72(%%rax)\n\t" \
2110 "pushq 64(%%rax)\n\t" \
2111 "pushq 56(%%rax)\n\t" \
2112 "movq 48(%%rax), %%r9\n\t" \
2113 "movq 40(%%rax), %%r8\n\t" \
2114 "movq 32(%%rax), %%rcx\n\t" \
2115 "movq 24(%%rax), %%rdx\n\t" \
2116 "movq 16(%%rax), %%rsi\n\t" \
2117 "movq 8(%%rax), %%rdi\n\t" \
2118 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2119 VALGRIND_CALL_NOREDIR_RAX \
2120 VALGRIND_RESTORE_STACK \
2121 VALGRIND_CFI_EPILOGUE \
2122 : /*out*/ "=a" (_res) \
2123 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2124 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2125 ); \
2126 lval = (__typeof__(lval)) _res; \
2127 } while (0)
2128
2129#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
2130
2131/* ------------------------ ppc32-linux ------------------------ */
2132
2133#if defined(PLAT_ppc32_linux)
2134
2135/* This is useful for finding out about the on-stack stuff:
2136
2137 extern int f9 ( int,int,int,int,int,int,int,int,int );
2138 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2139 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2140 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2141
2142 int g9 ( void ) {
2143 return f9(11,22,33,44,55,66,77,88,99);
2144 }
2145 int g10 ( void ) {
2146 return f10(11,22,33,44,55,66,77,88,99,110);
2147 }
2148 int g11 ( void ) {
2149 return f11(11,22,33,44,55,66,77,88,99,110,121);
2150 }
2151 int g12 ( void ) {
2152 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2153 }
2154*/
2155
2156/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2157
2158/* These regs are trashed by the hidden call. */
/* Clobber list for the hidden branch-and-link: the link, count and
   fixed-point-exception registers, every condition-register field,
   and GPRs r0, r2-r13 (the argument and scratch registers). */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",   \
   "r11", "r12", "r13"
2164
2165/* Macros to save and align the stack before making a function
2166 call and restore it afterwards as gcc may not keep the stack
2167 pointer aligned if it doesn't realise calls are being made
2168 to other functions. */
2169
/* Save the stack pointer (r1) in r28, then "rlwinm 1,1,0,0,27"
   keeps bits 0..27 (IBM numbering), i.e. clears the low 4 bits of
   r1, rounding the stack pointer down to a 16-byte boundary. */
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t" \
   "rlwinm 1,1,0,0,27\n\t"
/* Put back the stack pointer saved by VALGRIND_ALIGN_STACK.  r28 is
   therefore listed as trashed in every CALL_FN_ clobber list. */
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"
2175
2176/* These CALL_FN_ macros assume that on ppc32-linux,
2177 sizeof(unsigned long) == 4. */
2178
/* Call a 0-arg function.  %1 points at _argvec[0], which holds the
   target (non-redirected) address; it is loaded into r11 for the
   BRANCH_AND_LINK_TO_NOREDIR_R11 sequence.  The result comes back
   in r3. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 1-arg call: arg1 is loaded from _argvec[1] (offset 4, since
   sizeof(unsigned long) == 4 here) into r3. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 2-arg call: args go in r3, r4. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 3-arg call: args go in r3..r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 4-arg call: args go in r3..r6. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 5-arg call: args go in r3..r7. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 6-arg call: args go in r3..r8. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 7-arg call: args go in r3..r9. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 8-arg call: args go in r3..r10 — the last call that fits
   entirely in registers on ppc32. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 9-arg call: args 1..8 in r3..r10, arg9 goes on the stack.
   "addi 1,1,-16" carves 16 bytes; stack args start at offset 8
   from the new r1 (the first 8 bytes are the ABI linkage area).
   r3 is used as a scratch register for the stores, then reloaded
   with arg1 afterwards.  VALGRIND_RESTORE_STACK pops the 16 bytes
   by restoring r1 from r28. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 10-arg call: arg9 at 8(r1), arg10 at 12(r1); still fits in the
   16-byte carve-out. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 11-arg call: three stack args need a larger (32-byte, 16-byte
   aligned) carve-out; arg9..arg11 land at 8/12/16(r1). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 12-arg call: arg9..arg12 at 8/12/16/20(r1) inside the 32-byte
   carve-out. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      _argvec[12] = (unsigned long)arg12;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg12 */                                              \
         "lwz 3,48(11)\n\t"                                       \
         "stw 3,20(1)\n\t"                                        \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2628
2629#endif /* PLAT_ppc32_linux */
2630
2631/* ------------------------ ppc64-linux ------------------------ */
2632
2633#if defined(PLAT_ppc64be_linux)
2634
2635/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2636
2637/* These regs are trashed by the hidden call. */
/* Clobber list for the hidden branch-and-link on ppc64be.  Unlike
   the ppc32 list above, r2 (the TOC pointer) is NOT listed: the
   CALL_FN_ macros below save and restore it explicitly around the
   call. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"
2643
2644/* Macros to save and align the stack before making a function
2645 call and restore it afterwards as gcc may not keep the stack
2646 pointer aligned if it doesn't realise calls are being made
2647 to other functions. */
2648
/* Save the stack pointer (r1) in r28, then "rldicr 1,1,0,59" keeps
   bits 0..59, i.e. clears the low 4 bits of r1, rounding the stack
   pointer down to a 16-byte boundary. */
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t" \
   "rldicr 1,1,0,59\n\t"
/* Put back the stack pointer saved by VALGRIND_ALIGN_STACK.  r28 is
   therefore listed as trashed in every CALL_FN_ clobber list. */
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"
2654
2655/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2656 long) == 8. */
2657
/* Call a 0-arg function on ppc64be.  Layout: %1 = &_argvec[2], so
   _argvec[2] (the target nraddr) is at 0(%1), _argvec[1] (the
   callee's TOC pointer, _orig.r2) is at -8, and _argvec[0] (used to
   preserve our own r2 across the call) is at -16.  We save r2,
   install the callee's TOC, make the hidden call, then restore r2
   before handing back the r3 result. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 1-arg call: same TOC dance; arg1 sits at 8(%1) (_argvec[3]) and
   is loaded into r3. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 2-arg call: args at 8/16(%1) go in r3, r4. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 3-arg call: args at 8/16/24(%1) go in r3..r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 4-arg call: args at 8..32(%1) go in r3..r6. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2807
/* As CALL_FN_W_WWW, but with 5 word-sized args (loaded into
   r3..r7 from 8..40(r11)). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+5];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2843
/* As CALL_FN_W_WWW, but with 6 word-sized args (loaded into
   r3..r8 from 8..48(r11)). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+6];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2881
/* As CALL_FN_W_WWW, but with 7 word-sized args (loaded into
   r3..r9 from 8..56(r11)). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */                      \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2922
/* As CALL_FN_W_WWW, but with 8 word-sized args (loaded into
   r3..r10 from 8..64(r11)); 8 is the last all-registers case. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+8];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2965
/* As CALL_FN_W_8W, but args past the 8th go on the stack: the
   (already r28-saved and aligned) frame is grown by 128 bytes and
   arg9 is copied to 112(r1) — presumably the first parameter-save
   slot of the big-endian ELF ABI frame; confirm against the ABI
   doc.  VALGRIND_RESTORE_STACK pops the extension. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+9];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3014
/* As CALL_FN_W_9W; arg10 additionally goes to the stack at
   120(r1) (frame still grown by 128 bytes).  Stack args are
   stored high-to-low before the register args are loaded, since
   r3 is used as the copy scratch register. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+10];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3067
/* As CALL_FN_W_10W; frame grown by 144 (not 128) bytes to keep
   16-byte stack alignment with a third stack arg; arg11 goes to
   128(r1). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg11 */                                              \
         "ld 3,88(11)\n\t"                                        \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
    } while (0)
3124
/* As CALL_FN_W_11W; arg12 additionally goes to the stack at
   136(r1) (frame still grown by 144 bytes). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,arg12)           \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+12];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      _argvec[2+12] = (unsigned long)arg12;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg12 */                                              \
         "ld 3,96(11)\n\t"                                        \
         "std 3,136(1)\n\t"                                       \
         /* arg11 */                                              \
         "ld 3,88(11)\n\t"                                        \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3185
3186#endif /* PLAT_ppc64be_linux */
3187
3188/* ------------------------- ppc64le-linux ----------------------- */
3189#if defined(PLAT_ppc64le_linux)
3190
3191/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
3192
3193/* These regs are trashed by the hidden call. */
/* NOTE(review): clobber-list fragment naming lr/ctr/xer, all eight
   CR fields, and GPRs r0, r3-r13.  r13 is listed even though it is
   conventionally reserved on ppc64 — presumably deliberate
   conservatism; confirm upstream before changing. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"
3199
3200/* Macros to save and align the stack before making a function
3201 call and restore it afterwards as gcc may not keep the stack
3202 pointer aligned if it doesn't realise calls are being made
3203 to other functions. */
3204
/* Save the current stack pointer in r28, then round r1 down to a
   16-byte boundary (rldicr with mask 0..59 keeps the top 60 bits,
   i.e. clears the low 4 bits).  r28 must appear in the asm clobber
   list of every user. */
#define VALGRIND_ALIGN_STACK                                      \
   "mr 28,1\n\t"                                                  \
   "rldicr 1,1,0,59\n\t"
/* Restore the stack pointer saved in r28 by VALGRIND_ALIGN_STACK;
   this also discards any frame extension made in between. */
#define VALGRIND_RESTORE_STACK                                    \
   "mr 1,28\n\t"
3210
3211/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
3212 long) == 8. */
3213
/* ppc64le: call a zero-argument OrigFn, bypassing redirection; the
   word-sized result (r3) is assigned to lval.  r12 = &_argvec[2]:
   0(12)=target addr (also left in r12 for the branch helper),
   -8(12)=_argvec[1]=callee's TOC ptr, -16(12)=_argvec[0]= scratch
   slot parking the caller's TOC (r2) across the call.  r28 is
   clobbered by VALGRIND_ALIGN_STACK. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3239
/* As CALL_FN_W_v, but with 1 word-sized arg (8(r12) -> r3). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3267
/* As CALL_FN_W_v, but with 2 word-sized args (8,16(r12) -> r3,r4). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3297
/* As CALL_FN_W_v, but with 3 word-sized args (8..24(r12) -> r3..r5). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3329
/* As CALL_FN_W_v, but with 4 word-sized args (8..32(r12) -> r3..r6). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3363
/* As CALL_FN_W_v, but with 5 word-sized args (8..40(r12) -> r3..r7). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+5];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */                      \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3399
/* As CALL_FN_W_v, but with 6 word-sized args (8..48(r12) -> r3..r8). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+6];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */                      \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3437
/* As CALL_FN_W_v, but with 7 word-sized args (8..56(r12) -> r3..r9). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(12)\n\t"   /* arg7->r9 */                      \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3478
/* As CALL_FN_W_v, but with 8 word-sized args (8..64(r12) ->
   r3..r10); 8 is the last all-registers case. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+8];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(12)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(12)\n\t"  /* arg8->r10 */                     \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3521
/* As CALL_FN_W_8W, but args past the 8th go on the stack: the
   (already r28-saved and aligned) frame is grown by 128 bytes and
   arg9 is copied to 96(r1) — presumably the first parameter-save
   slot of the little-endian ELF ABI frame (smaller header than BE);
   confirm against the ABI doc. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+9];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(12)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(12)\n\t"  /* arg8->r10 */                     \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3570
/* As CALL_FN_W_9W; arg10 additionally goes to the stack at
   104(r1) (frame still grown by 128 bytes).  Stack args are
   stored high-to-low before the register args are loaded, since
   r3 is used as the copy scratch register. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+10];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(12)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(12)\n\t"  /* arg8->r10 */                     \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3623
/* As CALL_FN_W_10W; frame grown by 144 (not 128) bytes to keep
   16-byte stack alignment with a third stack arg; arg11 goes to
   112(r1). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg11 */                                              \
         "ld 3,88(12)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(12)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(12)\n\t"  /* arg8->r10 */                     \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3680
/* ppc64le: call a 12-arg function.  Same scheme as CALL_FN_W_11W, with
   args 9-12 copied to the stack parameter area at 96..120(r1) inside a
   144-byte extended frame; TOC (r2) is saved/restored around the call. */
3681#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
3682                                  arg7,arg8,arg9,arg10,arg11,arg12) \
3683   do {                                                           \
3684      volatile OrigFn _orig = (orig);                             \
3685      volatile unsigned long _argvec[3+12];                       \
3686      volatile unsigned long _res;                                \
3687      /* _argvec[0] holds current r2 across the call */           \
3688      _argvec[1] = (unsigned long)_orig.r2;                       \
3689      _argvec[2] = (unsigned long)_orig.nraddr;                   \
3690      _argvec[2+1] = (unsigned long)arg1;                         \
3691      _argvec[2+2] = (unsigned long)arg2;                         \
3692      _argvec[2+3] = (unsigned long)arg3;                         \
3693      _argvec[2+4] = (unsigned long)arg4;                         \
3694      _argvec[2+5] = (unsigned long)arg5;                         \
3695      _argvec[2+6] = (unsigned long)arg6;                         \
3696      _argvec[2+7] = (unsigned long)arg7;                         \
3697      _argvec[2+8] = (unsigned long)arg8;                         \
3698      _argvec[2+9] = (unsigned long)arg9;                         \
3699      _argvec[2+10] = (unsigned long)arg10;                       \
3700      _argvec[2+11] = (unsigned long)arg11;                       \
3701      _argvec[2+12] = (unsigned long)arg12;                       \
3702      __asm__ volatile(                                           \
3703         VALGRIND_ALIGN_STACK                                     \
3704         "mr 12,%1\n\t"                                           \
3705         "std 2,-16(12)\n\t"  /* save tocptr */                   \
3706         "ld 2,-8(12)\n\t"   /* use nraddr's tocptr */            \
3707         "addi 1,1,-144\n\t"  /* expand stack frame */            \
3708         /* arg12 */                                              \
3709         "ld 3,96(12)\n\t"                                        \
3710         "std 3,120(1)\n\t"                                       \
3711         /* arg11 */                                              \
3712         "ld 3,88(12)\n\t"                                        \
3713         "std 3,112(1)\n\t"                                       \
3714         /* arg10 */                                              \
3715         "ld 3,80(12)\n\t"                                        \
3716         "std 3,104(1)\n\t"                                       \
3717         /* arg9 */                                               \
3718         "ld 3,72(12)\n\t"                                        \
3719         "std 3,96(1)\n\t"                                        \
3720         /* args1-8 */                                            \
3721         "ld 3, 8(12)\n\t"  /* arg1->r3 */                        \
3722         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
3723         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
3724         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
3725         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
3726         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
3727         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
3728         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
3729         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
3730         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
3731         "mr 12,%1\n\t"                                           \
3732         "mr %0,3\n\t"                                            \
3733         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
3734         VALGRIND_RESTORE_STACK                                   \
3735         : /*out*/   "=r" (_res)                                  \
3736         : /*in*/    "r" (&_argvec[2])                            \
3737         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
3738      );                                                          \
3739      lval = (__typeof__(lval)) _res;                             \
3740   } while (0)
3741
3742#endif /* PLAT_ppc64le_linux */
3743
3744/* ------------------------- arm-linux ------------------------- */
3745
3746#if defined(PLAT_arm_linux)
3747
3748/* These regs are trashed by the hidden call. */
/* r0-r3 (argument/result regs), r12 (IP) and r14 (LR) are call-clobbered
   per the AAPCS; r4 is included because the CALL_FN_ macros below load the
   call target into it. */
3749#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
3750
3751/* Macros to save and align the stack before making a function
3752 call and restore it afterwards as gcc may not keep the stack
3753 pointer aligned if it doesn't realise calls are being made
3754 to other functions. */
3755
3756/* This is a bit tricky. We store the original stack pointer in r10
3757 as it is callee-saves. gcc doesn't allow the use of r11 for some
3758 reason. Also, we can't directly "bic" the stack pointer in thumb
3759 mode since r13 isn't an allowed register number in that context.
3760 So use r4 as a temporary, since that is about to get trashed
3761 anyway, just after each use of this macro. Side effect is we need
3762 to be very careful about any future changes, since
3763 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
/* Preserve the original sp in callee-saved r10, then round sp down to an
   8-byte boundary (via r4 as scratch; see rationale above). */
3764#define VALGRIND_ALIGN_STACK \
3765      "mov r10, sp\n\t" \
3766      "mov r4, sp\n\t" \
3767      "bic r4, r4, #7\n\t" \
3768      "mov sp, r4\n\t"
/* Undo VALGRIND_ALIGN_STACK: recover the saved sp from r10. */
3769#define VALGRIND_RESTORE_STACK \
3770      "mov sp, r10\n\t"
3771
3772/* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3773 long) == 4. */
3774
/* arm: call a 0-arg function: target address in r4, result taken from r0. */
3775#define CALL_FN_W_v(lval, orig)                                   \
3776   do {                                                           \
3777      volatile OrigFn _orig = (orig);                             \
3778      volatile unsigned long _argvec[1];                          \
3779      volatile unsigned long _res;                                \
3780      _argvec[0] = (unsigned long)_orig.nraddr;                   \
3781      __asm__ volatile(                                           \
3782         VALGRIND_ALIGN_STACK                                     \
3783         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
3784         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
3785         VALGRIND_RESTORE_STACK                                   \
3786         "mov %0, r0\n"                                           \
3787         : /*out*/   "=r" (_res)                                  \
3788         : /*in*/    "0" (&_argvec[0])                            \
3789         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
3790      );                                                          \
3791      lval = (__typeof__(lval)) _res;                             \
3792   } while (0)
3793
/* arm: 1-arg call: arg1->r0, target->r4, result<-r0. */
3794#define CALL_FN_W_W(lval, orig, arg1)                             \
3795   do {                                                           \
3796      volatile OrigFn _orig = (orig);                             \
3797      volatile unsigned long _argvec[2];                          \
3798      volatile unsigned long _res;                                \
3799      _argvec[0] = (unsigned long)_orig.nraddr;                   \
3800      _argvec[1] = (unsigned long)(arg1);                         \
3801      __asm__ volatile(                                           \
3802         VALGRIND_ALIGN_STACK                                     \
3803         "ldr r0, [%1, #4] \n\t"                                  \
3804         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
3805         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
3806         VALGRIND_RESTORE_STACK                                   \
3807         "mov %0, r0\n"                                           \
3808         : /*out*/   "=r" (_res)                                  \
3809         : /*in*/    "0" (&_argvec[0])                            \
3810         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
3811      );                                                          \
3812      lval = (__typeof__(lval)) _res;                             \
3813   } while (0)
3814
/* arm: 2-arg call: args in r0,r1; target->r4; result<-r0. */
3815#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
3816   do {                                                           \
3817      volatile OrigFn _orig = (orig);                             \
3818      volatile unsigned long _argvec[3];                          \
3819      volatile unsigned long _res;                                \
3820      _argvec[0] = (unsigned long)_orig.nraddr;                   \
3821      _argvec[1] = (unsigned long)(arg1);                         \
3822      _argvec[2] = (unsigned long)(arg2);                         \
3823      __asm__ volatile(                                           \
3824         VALGRIND_ALIGN_STACK                                     \
3825         "ldr r0, [%1, #4] \n\t"                                  \
3826         "ldr r1, [%1, #8] \n\t"                                  \
3827         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
3828         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
3829         VALGRIND_RESTORE_STACK                                   \
3830         "mov %0, r0\n"                                           \
3831         : /*out*/   "=r" (_res)                                  \
3832         : /*in*/    "0" (&_argvec[0])                            \
3833         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
3834      );                                                          \
3835      lval = (__typeof__(lval)) _res;                             \
3836   } while (0)
3837
/* arm: 3-arg call: args in r0-r2; target->r4; result<-r0. */
3838#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
3839   do {                                                           \
3840      volatile OrigFn _orig = (orig);                             \
3841      volatile unsigned long _argvec[4];                          \
3842      volatile unsigned long _res;                                \
3843      _argvec[0] = (unsigned long)_orig.nraddr;                   \
3844      _argvec[1] = (unsigned long)(arg1);                         \
3845      _argvec[2] = (unsigned long)(arg2);                         \
3846      _argvec[3] = (unsigned long)(arg3);                         \
3847      __asm__ volatile(                                           \
3848         VALGRIND_ALIGN_STACK                                     \
3849         "ldr r0, [%1, #4] \n\t"                                  \
3850         "ldr r1, [%1, #8] \n\t"                                  \
3851         "ldr r2, [%1, #12] \n\t"                                 \
3852         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
3853         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
3854         VALGRIND_RESTORE_STACK                                   \
3855         "mov %0, r0\n"                                           \
3856         : /*out*/   "=r" (_res)                                  \
3857         : /*in*/    "0" (&_argvec[0])                            \
3858         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
3859      );                                                          \
3860      lval = (__typeof__(lval)) _res;                             \
3861   } while (0)
3862
/* arm: 4-arg call: args fill all register slots r0-r3; target->r4. */
3863#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
3864   do {                                                           \
3865      volatile OrigFn _orig = (orig);                             \
3866      volatile unsigned long _argvec[5];                          \
3867      volatile unsigned long _res;                                \
3868      _argvec[0] = (unsigned long)_orig.nraddr;                   \
3869      _argvec[1] = (unsigned long)(arg1);                         \
3870      _argvec[2] = (unsigned long)(arg2);                         \
3871      _argvec[3] = (unsigned long)(arg3);                         \
3872      _argvec[4] = (unsigned long)(arg4);                         \
3873      __asm__ volatile(                                           \
3874         VALGRIND_ALIGN_STACK                                     \
3875         "ldr r0, [%1, #4] \n\t"                                  \
3876         "ldr r1, [%1, #8] \n\t"                                  \
3877         "ldr r2, [%1, #12] \n\t"                                 \
3878         "ldr r3, [%1, #16] \n\t"                                 \
3879         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
3880         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
3881         VALGRIND_RESTORE_STACK                                   \
3882         "mov %0, r0"                                             \
3883         : /*out*/   "=r" (_res)                                  \
3884         : /*in*/    "0" (&_argvec[0])                            \
3885         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
3886      );                                                          \
3887      lval = (__typeof__(lval)) _res;                             \
3888   } while (0)
3889
/* arm: 5-arg call: args1-4 in r0-r3, arg5 pushed on the stack; the extra
   "sub sp, #4" makes the total stack adjustment 8 bytes, preserving
   8-byte sp alignment established by VALGRIND_ALIGN_STACK. */
3890#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
3891   do {                                                           \
3892      volatile OrigFn _orig = (orig);                             \
3893      volatile unsigned long _argvec[6];                          \
3894      volatile unsigned long _res;                                \
3895      _argvec[0] = (unsigned long)_orig.nraddr;                   \
3896      _argvec[1] = (unsigned long)(arg1);                         \
3897      _argvec[2] = (unsigned long)(arg2);                         \
3898      _argvec[3] = (unsigned long)(arg3);                         \
3899      _argvec[4] = (unsigned long)(arg4);                         \
3900      _argvec[5] = (unsigned long)(arg5);                         \
3901      __asm__ volatile(                                           \
3902         VALGRIND_ALIGN_STACK                                     \
3903         "sub sp, sp, #4 \n\t"                                    \
3904         "ldr r0, [%1, #20] \n\t"                                 \
3905         "push {r0} \n\t"                                         \
3906         "ldr r0, [%1, #4] \n\t"                                  \
3907         "ldr r1, [%1, #8] \n\t"                                  \
3908         "ldr r2, [%1, #12] \n\t"                                 \
3909         "ldr r3, [%1, #16] \n\t"                                 \
3910         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
3911         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
3912         VALGRIND_RESTORE_STACK                                   \
3913         "mov %0, r0"                                             \
3914         : /*out*/   "=r" (_res)                                  \
3915         : /*in*/    "0" (&_argvec[0])                            \
3916         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
3917      );                                                          \
3918      lval = (__typeof__(lval)) _res;                             \
3919   } while (0)
3920
/* arm: 6-arg call: args1-4 in r0-r3, args5-6 pushed together
   (8 bytes, so 8-byte sp alignment is preserved without padding). */
3921#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
3922   do {                                                           \
3923      volatile OrigFn _orig = (orig);                             \
3924      volatile unsigned long _argvec[7];                          \
3925      volatile unsigned long _res;                                \
3926      _argvec[0] = (unsigned long)_orig.nraddr;                   \
3927      _argvec[1] = (unsigned long)(arg1);                         \
3928      _argvec[2] = (unsigned long)(arg2);                         \
3929      _argvec[3] = (unsigned long)(arg3);                         \
3930      _argvec[4] = (unsigned long)(arg4);                         \
3931      _argvec[5] = (unsigned long)(arg5);                         \
3932      _argvec[6] = (unsigned long)(arg6);                         \
3933      __asm__ volatile(                                           \
3934         VALGRIND_ALIGN_STACK                                     \
3935         "ldr r0, [%1, #20] \n\t"                                 \
3936         "ldr r1, [%1, #24] \n\t"                                 \
3937         "push {r0, r1} \n\t"                                     \
3938         "ldr r0, [%1, #4] \n\t"                                  \
3939         "ldr r1, [%1, #8] \n\t"                                  \
3940         "ldr r2, [%1, #12] \n\t"                                 \
3941         "ldr r3, [%1, #16] \n\t"                                 \
3942         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
3943         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
3944         VALGRIND_RESTORE_STACK                                   \
3945         "mov %0, r0"                                             \
3946         : /*out*/   "=r" (_res)                                  \
3947         : /*in*/    "0" (&_argvec[0])                            \
3948         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
3949      );                                                          \
3950      lval = (__typeof__(lval)) _res;                             \
3951   } while (0)
3952
/* arm: 7-arg call: args1-4 in r0-r3, args5-7 pushed; "sub sp, #4" plus
   the 3-register push totals 16 bytes, keeping sp 8-byte aligned. */
3953#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
3954                     arg7)                                        \
3955   do {                                                           \
3956      volatile OrigFn _orig = (orig);                             \
3957      volatile unsigned long _argvec[8];                          \
3958      volatile unsigned long _res;                                \
3959      _argvec[0] = (unsigned long)_orig.nraddr;                   \
3960      _argvec[1] = (unsigned long)(arg1);                         \
3961      _argvec[2] = (unsigned long)(arg2);                         \
3962      _argvec[3] = (unsigned long)(arg3);                         \
3963      _argvec[4] = (unsigned long)(arg4);                         \
3964      _argvec[5] = (unsigned long)(arg5);                         \
3965      _argvec[6] = (unsigned long)(arg6);                         \
3966      _argvec[7] = (unsigned long)(arg7);                         \
3967      __asm__ volatile(                                           \
3968         VALGRIND_ALIGN_STACK                                     \
3969         "sub sp, sp, #4 \n\t"                                    \
3970         "ldr r0, [%1, #20] \n\t"                                 \
3971         "ldr r1, [%1, #24] \n\t"                                 \
3972         "ldr r2, [%1, #28] \n\t"                                 \
3973         "push {r0, r1, r2} \n\t"                                 \
3974         "ldr r0, [%1, #4] \n\t"                                  \
3975         "ldr r1, [%1, #8] \n\t"                                  \
3976         "ldr r2, [%1, #12] \n\t"                                 \
3977         "ldr r3, [%1, #16] \n\t"                                 \
3978         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
3979         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
3980         VALGRIND_RESTORE_STACK                                   \
3981         "mov %0, r0"                                             \
3982         : /*out*/   "=r" (_res)                                  \
3983         : /*in*/    "0" (&_argvec[0])                            \
3984         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
3985      );                                                          \
3986      lval = (__typeof__(lval)) _res;                             \
3987   } while (0)
3988
/* arm: 8-arg call: args1-4 in r0-r3, args5-8 pushed in one 4-register
   push (16 bytes; no alignment padding needed). */
3989#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
3990                     arg7,arg8)                                   \
3991   do {                                                           \
3992      volatile OrigFn _orig = (orig);                             \
3993      volatile unsigned long _argvec[9];                          \
3994      volatile unsigned long _res;                                \
3995      _argvec[0] = (unsigned long)_orig.nraddr;                   \
3996      _argvec[1] = (unsigned long)(arg1);                         \
3997      _argvec[2] = (unsigned long)(arg2);                         \
3998      _argvec[3] = (unsigned long)(arg3);                         \
3999      _argvec[4] = (unsigned long)(arg4);                         \
4000      _argvec[5] = (unsigned long)(arg5);                         \
4001      _argvec[6] = (unsigned long)(arg6);                         \
4002      _argvec[7] = (unsigned long)(arg7);                         \
4003      _argvec[8] = (unsigned long)(arg8);                         \
4004      __asm__ volatile(                                           \
4005         VALGRIND_ALIGN_STACK                                     \
4006         "ldr r0, [%1, #20] \n\t"                                 \
4007         "ldr r1, [%1, #24] \n\t"                                 \
4008         "ldr r2, [%1, #28] \n\t"                                 \
4009         "ldr r3, [%1, #32] \n\t"                                 \
4010         "push {r0, r1, r2, r3} \n\t"                             \
4011         "ldr r0, [%1, #4] \n\t"                                  \
4012         "ldr r1, [%1, #8] \n\t"                                  \
4013         "ldr r2, [%1, #12] \n\t"                                 \
4014         "ldr r3, [%1, #16] \n\t"                                 \
4015         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
4016         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
4017         VALGRIND_RESTORE_STACK                                   \
4018         "mov %0, r0"                                             \
4019         : /*out*/   "=r" (_res)                                  \
4020         : /*in*/    "0" (&_argvec[0])                            \
4021         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
4022      );                                                          \
4023      lval = (__typeof__(lval)) _res;                             \
4024   } while (0)
4025
/* arm: 9-arg call: args1-4 in r0-r3, args5-9 pushed; "sub sp, #4" plus
   the 5-register push totals 24 bytes, keeping sp 8-byte aligned. */
4026#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
4027                     arg7,arg8,arg9)                              \
4028   do {                                                           \
4029      volatile OrigFn _orig = (orig);                             \
4030      volatile unsigned long _argvec[10];                         \
4031      volatile unsigned long _res;                                \
4032      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4033      _argvec[1] = (unsigned long)(arg1);                         \
4034      _argvec[2] = (unsigned long)(arg2);                         \
4035      _argvec[3] = (unsigned long)(arg3);                         \
4036      _argvec[4] = (unsigned long)(arg4);                         \
4037      _argvec[5] = (unsigned long)(arg5);                         \
4038      _argvec[6] = (unsigned long)(arg6);                         \
4039      _argvec[7] = (unsigned long)(arg7);                         \
4040      _argvec[8] = (unsigned long)(arg8);                         \
4041      _argvec[9] = (unsigned long)(arg9);                         \
4042      __asm__ volatile(                                           \
4043         VALGRIND_ALIGN_STACK                                     \
4044         "sub sp, sp, #4 \n\t"                                    \
4045         "ldr r0, [%1, #20] \n\t"                                 \
4046         "ldr r1, [%1, #24] \n\t"                                 \
4047         "ldr r2, [%1, #28] \n\t"                                 \
4048         "ldr r3, [%1, #32] \n\t"                                 \
4049         "ldr r4, [%1, #36] \n\t"                                 \
4050         "push {r0, r1, r2, r3, r4} \n\t"                         \
4051         "ldr r0, [%1, #4] \n\t"                                  \
4052         "ldr r1, [%1, #8] \n\t"                                  \
4053         "ldr r2, [%1, #12] \n\t"                                 \
4054         "ldr r3, [%1, #16] \n\t"                                 \
4055         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
4056         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
4057         VALGRIND_RESTORE_STACK                                   \
4058         "mov %0, r0"                                             \
4059         : /*out*/   "=r" (_res)                                  \
4060         : /*in*/    "0" (&_argvec[0])                            \
4061         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
4062      );                                                          \
4063      lval = (__typeof__(lval)) _res;                             \
4064   } while (0)
4065
/* arm: 10-arg call: args1-4 in r0-r3; arg10 is pushed first, then
   args5-9, so they land in ascending address order (24 bytes total,
   8-byte alignment preserved). */
4066#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
4067                      arg7,arg8,arg9,arg10)                       \
4068   do {                                                           \
4069      volatile OrigFn _orig = (orig);                             \
4070      volatile unsigned long _argvec[11];                         \
4071      volatile unsigned long _res;                                \
4072      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4073      _argvec[1] = (unsigned long)(arg1);                         \
4074      _argvec[2] = (unsigned long)(arg2);                         \
4075      _argvec[3] = (unsigned long)(arg3);                         \
4076      _argvec[4] = (unsigned long)(arg4);                         \
4077      _argvec[5] = (unsigned long)(arg5);                         \
4078      _argvec[6] = (unsigned long)(arg6);                         \
4079      _argvec[7] = (unsigned long)(arg7);                         \
4080      _argvec[8] = (unsigned long)(arg8);                         \
4081      _argvec[9] = (unsigned long)(arg9);                         \
4082      _argvec[10] = (unsigned long)(arg10);                       \
4083      __asm__ volatile(                                           \
4084         VALGRIND_ALIGN_STACK                                     \
4085         "ldr r0, [%1, #40] \n\t"                                 \
4086         "push {r0} \n\t"                                         \
4087         "ldr r0, [%1, #20] \n\t"                                 \
4088         "ldr r1, [%1, #24] \n\t"                                 \
4089         "ldr r2, [%1, #28] \n\t"                                 \
4090         "ldr r3, [%1, #32] \n\t"                                 \
4091         "ldr r4, [%1, #36] \n\t"                                 \
4092         "push {r0, r1, r2, r3, r4} \n\t"                         \
4093         "ldr r0, [%1, #4] \n\t"                                  \
4094         "ldr r1, [%1, #8] \n\t"                                  \
4095         "ldr r2, [%1, #12] \n\t"                                 \
4096         "ldr r3, [%1, #16] \n\t"                                 \
4097         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
4098         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
4099         VALGRIND_RESTORE_STACK                                   \
4100         "mov %0, r0"                                             \
4101         : /*out*/   "=r" (_res)                                  \
4102         : /*in*/    "0" (&_argvec[0])                            \
4103         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
4104      );                                                          \
4105      lval = (__typeof__(lval)) _res;                             \
4106   } while (0)
4107
/* arm: 11-arg call: args1-4 in r0-r3; args10-11 pushed first, then
   args5-9.  "sub sp, #4" + 2-reg push + 5-reg push = 32 bytes, keeping
   sp 8-byte aligned. */
4108#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
4109                      arg6,arg7,arg8,arg9,arg10,                  \
4110                      arg11)                                      \
4111   do {                                                           \
4112      volatile OrigFn _orig = (orig);                             \
4113      volatile unsigned long _argvec[12];                         \
4114      volatile unsigned long _res;                                \
4115      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4116      _argvec[1] = (unsigned long)(arg1);                         \
4117      _argvec[2] = (unsigned long)(arg2);                         \
4118      _argvec[3] = (unsigned long)(arg3);                         \
4119      _argvec[4] = (unsigned long)(arg4);                         \
4120      _argvec[5] = (unsigned long)(arg5);                         \
4121      _argvec[6] = (unsigned long)(arg6);                         \
4122      _argvec[7] = (unsigned long)(arg7);                         \
4123      _argvec[8] = (unsigned long)(arg8);                         \
4124      _argvec[9] = (unsigned long)(arg9);                         \
4125      _argvec[10] = (unsigned long)(arg10);                       \
4126      _argvec[11] = (unsigned long)(arg11);                       \
4127      __asm__ volatile(                                           \
4128         VALGRIND_ALIGN_STACK                                     \
4129         "sub sp, sp, #4 \n\t"                                    \
4130         "ldr r0, [%1, #40] \n\t"                                 \
4131         "ldr r1, [%1, #44] \n\t"                                 \
4132         "push {r0, r1} \n\t"                                     \
4133         "ldr r0, [%1, #20] \n\t"                                 \
4134         "ldr r1, [%1, #24] \n\t"                                 \
4135         "ldr r2, [%1, #28] \n\t"                                 \
4136         "ldr r3, [%1, #32] \n\t"                                 \
4137         "ldr r4, [%1, #36] \n\t"                                 \
4138         "push {r0, r1, r2, r3, r4} \n\t"                         \
4139         "ldr r0, [%1, #4] \n\t"                                  \
4140         "ldr r1, [%1, #8] \n\t"                                  \
4141         "ldr r2, [%1, #12] \n\t"                                 \
4142         "ldr r3, [%1, #16] \n\t"                                 \
4143         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
4144         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
4145         VALGRIND_RESTORE_STACK                                   \
4146         "mov %0, r0"                                             \
4147         : /*out*/   "=r" (_res)                                  \
4148         : /*in*/    "0" (&_argvec[0])                            \
4149         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
4150      );                                                          \
4151      lval = (__typeof__(lval)) _res;                             \
4152   } while (0)
4153
/* arm: 12-arg call: args1-4 in r0-r3; args10-12 pushed first, then
   args5-9 (3-reg push + 5-reg push = 32 bytes; alignment preserved). */
4154#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
4155                      arg6,arg7,arg8,arg9,arg10,                  \
4156                      arg11,arg12)                                \
4157   do {                                                           \
4158      volatile OrigFn _orig = (orig);                             \
4159      volatile unsigned long _argvec[13];                         \
4160      volatile unsigned long _res;                                \
4161      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4162      _argvec[1] = (unsigned long)(arg1);                         \
4163      _argvec[2] = (unsigned long)(arg2);                         \
4164      _argvec[3] = (unsigned long)(arg3);                         \
4165      _argvec[4] = (unsigned long)(arg4);                         \
4166      _argvec[5] = (unsigned long)(arg5);                         \
4167      _argvec[6] = (unsigned long)(arg6);                         \
4168      _argvec[7] = (unsigned long)(arg7);                         \
4169      _argvec[8] = (unsigned long)(arg8);                         \
4170      _argvec[9] = (unsigned long)(arg9);                         \
4171      _argvec[10] = (unsigned long)(arg10);                       \
4172      _argvec[11] = (unsigned long)(arg11);                       \
4173      _argvec[12] = (unsigned long)(arg12);                       \
4174      __asm__ volatile(                                           \
4175         VALGRIND_ALIGN_STACK                                     \
4176         "ldr r0, [%1, #40] \n\t"                                 \
4177         "ldr r1, [%1, #44] \n\t"                                 \
4178         "ldr r2, [%1, #48] \n\t"                                 \
4179         "push {r0, r1, r2} \n\t"                                 \
4180         "ldr r0, [%1, #20] \n\t"                                 \
4181         "ldr r1, [%1, #24] \n\t"                                 \
4182         "ldr r2, [%1, #28] \n\t"                                 \
4183         "ldr r3, [%1, #32] \n\t"                                 \
4184         "ldr r4, [%1, #36] \n\t"                                 \
4185         "push {r0, r1, r2, r3, r4} \n\t"                         \
4186         "ldr r0, [%1, #4] \n\t"                                  \
4187         "ldr r1, [%1, #8] \n\t"                                  \
4188         "ldr r2, [%1, #12] \n\t"                                 \
4189         "ldr r3, [%1, #16] \n\t"                                 \
4190         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
4191         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
4192         VALGRIND_RESTORE_STACK                                   \
4193         "mov %0, r0"                                             \
4194         : /*out*/   "=r" (_res)                                  \
4195         : /*in*/    "0" (&_argvec[0])                            \
4196         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
4197      );                                                          \
4198      lval = (__typeof__(lval)) _res;                             \
4199   } while (0)
4200
4201#endif /* PLAT_arm_linux */
4202
4203/* ------------------------ arm64-linux ------------------------ */
4204
4205#if defined(PLAT_arm64_linux)
4206
4207/* These regs are trashed by the hidden call. */
/* Integer regs x0-x18 and x30 (LR) are call-clobbered per AAPCS64; x8 is
   also the register the CALL_FN_ macros load the target into.  All of
   v0-v31 are listed; NOTE(review): x19/x20 and the full v8-v15 are
   normally callee-saved — presumably the hidden-call mechanism can trash
   them, hence their inclusion. */
4208#define __CALLER_SAVED_REGS \
4209     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9",   \
4210     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17",      \
4211     "x18", "x19", "x20", "x30",                                  \
4212     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",  \
4213     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17",      \
4214     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25",      \
4215     "v26", "v27", "v28", "v29", "v30", "v31"
4216
4217/* x21 is callee-saved, so we can use it to save and restore SP around
4218 the hidden call. */
/* Preserve the original sp in callee-saved x21, then round sp down to a
   16-byte boundary. */
4219#define VALGRIND_ALIGN_STACK \
4220      "mov x21, sp\n\t" \
4221      "bic sp, x21, #15\n\t"
/* Undo VALGRIND_ALIGN_STACK: recover the saved sp from x21. */
4222#define VALGRIND_RESTORE_STACK \
4223      "mov sp, x21\n\t"
4224
4225/* These CALL_FN_ macros assume that on arm64-linux,
4226 sizeof(unsigned long) == 8. */
4227
/* arm64: call a 0-arg function: target address in x8, result from x0. */
4228#define CALL_FN_W_v(lval, orig)                                   \
4229   do {                                                           \
4230      volatile OrigFn _orig = (orig);                             \
4231      volatile unsigned long _argvec[1];                          \
4232      volatile unsigned long _res;                                \
4233      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4234      __asm__ volatile(                                           \
4235         VALGRIND_ALIGN_STACK                                     \
4236         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4237         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4238         VALGRIND_RESTORE_STACK                                   \
4239         "mov %0, x0\n"                                           \
4240         : /*out*/   "=r" (_res)                                  \
4241         : /*in*/    "0" (&_argvec[0])                            \
4242         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4243      );                                                          \
4244      lval = (__typeof__(lval)) _res;                             \
4245   } while (0)
4246
/* arm64: 1-arg call: arg1->x0, target->x8, result<-x0. */
4247#define CALL_FN_W_W(lval, orig, arg1)                             \
4248   do {                                                           \
4249      volatile OrigFn _orig = (orig);                             \
4250      volatile unsigned long _argvec[2];                          \
4251      volatile unsigned long _res;                                \
4252      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4253      _argvec[1] = (unsigned long)(arg1);                         \
4254      __asm__ volatile(                                           \
4255         VALGRIND_ALIGN_STACK                                     \
4256         "ldr x0, [%1, #8] \n\t"                                  \
4257         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4258         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4259         VALGRIND_RESTORE_STACK                                   \
4260         "mov %0, x0\n"                                           \
4261         : /*out*/   "=r" (_res)                                  \
4262         : /*in*/    "0" (&_argvec[0])                            \
4263         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4264      );                                                          \
4265      lval = (__typeof__(lval)) _res;                             \
4266   } while (0)
4267
/* arm64: 2-arg call: args in x0,x1; target->x8; result<-x0. */
4268#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
4269   do {                                                           \
4270      volatile OrigFn _orig = (orig);                             \
4271      volatile unsigned long _argvec[3];                          \
4272      volatile unsigned long _res;                                \
4273      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4274      _argvec[1] = (unsigned long)(arg1);                         \
4275      _argvec[2] = (unsigned long)(arg2);                         \
4276      __asm__ volatile(                                           \
4277         VALGRIND_ALIGN_STACK                                     \
4278         "ldr x0, [%1, #8] \n\t"                                  \
4279         "ldr x1, [%1, #16] \n\t"                                 \
4280         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4281         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4282         VALGRIND_RESTORE_STACK                                   \
4283         "mov %0, x0\n"                                           \
4284         : /*out*/   "=r" (_res)                                  \
4285         : /*in*/    "0" (&_argvec[0])                            \
4286         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4287      );                                                          \
4288      lval = (__typeof__(lval)) _res;                             \
4289   } while (0)
4290
/* arm64: 3-arg call: args in x0-x2; target->x8; result<-x0. */
4291#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
4292   do {                                                           \
4293      volatile OrigFn _orig = (orig);                             \
4294      volatile unsigned long _argvec[4];                          \
4295      volatile unsigned long _res;                                \
4296      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4297      _argvec[1] = (unsigned long)(arg1);                         \
4298      _argvec[2] = (unsigned long)(arg2);                         \
4299      _argvec[3] = (unsigned long)(arg3);                         \
4300      __asm__ volatile(                                           \
4301         VALGRIND_ALIGN_STACK                                     \
4302         "ldr x0, [%1, #8] \n\t"                                  \
4303         "ldr x1, [%1, #16] \n\t"                                 \
4304         "ldr x2, [%1, #24] \n\t"                                 \
4305         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4306         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4307         VALGRIND_RESTORE_STACK                                   \
4308         "mov %0, x0\n"                                           \
4309         : /*out*/   "=r" (_res)                                  \
4310         : /*in*/    "0" (&_argvec[0])                            \
4311         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4312      );                                                          \
4313      lval = (__typeof__(lval)) _res;                             \
4314   } while (0)
4315
/* arm64: 4-arg call: args in x0-x3; target->x8; result<-x0. */
4316#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
4317   do {                                                           \
4318      volatile OrigFn _orig = (orig);                             \
4319      volatile unsigned long _argvec[5];                          \
4320      volatile unsigned long _res;                                \
4321      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4322      _argvec[1] = (unsigned long)(arg1);                         \
4323      _argvec[2] = (unsigned long)(arg2);                         \
4324      _argvec[3] = (unsigned long)(arg3);                         \
4325      _argvec[4] = (unsigned long)(arg4);                         \
4326      __asm__ volatile(                                           \
4327         VALGRIND_ALIGN_STACK                                     \
4328         "ldr x0, [%1, #8] \n\t"                                  \
4329         "ldr x1, [%1, #16] \n\t"                                 \
4330         "ldr x2, [%1, #24] \n\t"                                 \
4331         "ldr x3, [%1, #32] \n\t"                                 \
4332         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4333         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4334         VALGRIND_RESTORE_STACK                                   \
4335         "mov %0, x0"                                             \
4336         : /*out*/   "=r" (_res)                                  \
4337         : /*in*/    "0" (&_argvec[0])                            \
4338         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4339      );                                                          \
4340      lval = (__typeof__(lval)) _res;                             \
4341   } while (0)
4342
/* arm64: 5-arg call: args in x0-x4; all fit in registers. */
4343#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
4344   do {                                                           \
4345      volatile OrigFn _orig = (orig);                             \
4346      volatile unsigned long _argvec[6];                          \
4347      volatile unsigned long _res;                                \
4348      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4349      _argvec[1] = (unsigned long)(arg1);                         \
4350      _argvec[2] = (unsigned long)(arg2);                         \
4351      _argvec[3] = (unsigned long)(arg3);                         \
4352      _argvec[4] = (unsigned long)(arg4);                         \
4353      _argvec[5] = (unsigned long)(arg5);                         \
4354      __asm__ volatile(                                           \
4355         VALGRIND_ALIGN_STACK                                     \
4356         "ldr x0, [%1, #8] \n\t"                                  \
4357         "ldr x1, [%1, #16] \n\t"                                 \
4358         "ldr x2, [%1, #24] \n\t"                                 \
4359         "ldr x3, [%1, #32] \n\t"                                 \
4360         "ldr x4, [%1, #40] \n\t"                                 \
4361         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4362         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4363         VALGRIND_RESTORE_STACK                                   \
4364         "mov %0, x0"                                             \
4365         : /*out*/   "=r" (_res)                                  \
4366         : /*in*/    "0" (&_argvec[0])                            \
4367         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4368      );                                                          \
4369      lval = (__typeof__(lval)) _res;                             \
4370   } while (0)
4371
/* arm64: 6-arg call: args in x0-x5; all fit in registers. */
4372#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
4373   do {                                                           \
4374      volatile OrigFn _orig = (orig);                             \
4375      volatile unsigned long _argvec[7];                          \
4376      volatile unsigned long _res;                                \
4377      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4378      _argvec[1] = (unsigned long)(arg1);                         \
4379      _argvec[2] = (unsigned long)(arg2);                         \
4380      _argvec[3] = (unsigned long)(arg3);                         \
4381      _argvec[4] = (unsigned long)(arg4);                         \
4382      _argvec[5] = (unsigned long)(arg5);                         \
4383      _argvec[6] = (unsigned long)(arg6);                         \
4384      __asm__ volatile(                                           \
4385         VALGRIND_ALIGN_STACK                                     \
4386         "ldr x0, [%1, #8] \n\t"                                  \
4387         "ldr x1, [%1, #16] \n\t"                                 \
4388         "ldr x2, [%1, #24] \n\t"                                 \
4389         "ldr x3, [%1, #32] \n\t"                                 \
4390         "ldr x4, [%1, #40] \n\t"                                 \
4391         "ldr x5, [%1, #48] \n\t"                                 \
4392         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4393         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4394         VALGRIND_RESTORE_STACK                                   \
4395         "mov %0, x0"                                             \
4396         : /*out*/   "=r" (_res)                                  \
4397         : /*in*/    "0" (&_argvec[0])                            \
4398         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4399      );                                                          \
4400      lval = (__typeof__(lval)) _res;                             \
4401   } while (0)
4402
/* arm64: 7-arg call: args in x0-x6; all fit in registers. */
4403#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
4404                     arg7)                                        \
4405   do {                                                           \
4406      volatile OrigFn _orig = (orig);                             \
4407      volatile unsigned long _argvec[8];                          \
4408      volatile unsigned long _res;                                \
4409      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4410      _argvec[1] = (unsigned long)(arg1);                         \
4411      _argvec[2] = (unsigned long)(arg2);                         \
4412      _argvec[3] = (unsigned long)(arg3);                         \
4413      _argvec[4] = (unsigned long)(arg4);                         \
4414      _argvec[5] = (unsigned long)(arg5);                         \
4415      _argvec[6] = (unsigned long)(arg6);                         \
4416      _argvec[7] = (unsigned long)(arg7);                         \
4417      __asm__ volatile(                                           \
4418         VALGRIND_ALIGN_STACK                                     \
4419         "ldr x0, [%1, #8] \n\t"                                  \
4420         "ldr x1, [%1, #16] \n\t"                                 \
4421         "ldr x2, [%1, #24] \n\t"                                 \
4422         "ldr x3, [%1, #32] \n\t"                                 \
4423         "ldr x4, [%1, #40] \n\t"                                 \
4424         "ldr x5, [%1, #48] \n\t"                                 \
4425         "ldr x6, [%1, #56] \n\t"                                 \
4426         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4427         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4428         VALGRIND_RESTORE_STACK                                   \
4429         "mov %0, x0"                                             \
4430         : /*out*/   "=r" (_res)                                  \
4431         : /*in*/    "0" (&_argvec[0])                            \
4432         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4433      );                                                          \
4434      lval = (__typeof__(lval)) _res;                             \
4435   } while (0)
4436
/* arm64: 8-arg call: args fill all register slots x0-x7. */
4437#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
4438                     arg7,arg8)                                   \
4439   do {                                                           \
4440      volatile OrigFn _orig = (orig);                             \
4441      volatile unsigned long _argvec[9];                          \
4442      volatile unsigned long _res;                                \
4443      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4444      _argvec[1] = (unsigned long)(arg1);                         \
4445      _argvec[2] = (unsigned long)(arg2);                         \
4446      _argvec[3] = (unsigned long)(arg3);                         \
4447      _argvec[4] = (unsigned long)(arg4);                         \
4448      _argvec[5] = (unsigned long)(arg5);                         \
4449      _argvec[6] = (unsigned long)(arg6);                         \
4450      _argvec[7] = (unsigned long)(arg7);                         \
4451      _argvec[8] = (unsigned long)(arg8);                         \
4452      __asm__ volatile(                                           \
4453         VALGRIND_ALIGN_STACK                                     \
4454         "ldr x0, [%1, #8] \n\t"                                  \
4455         "ldr x1, [%1, #16] \n\t"                                 \
4456         "ldr x2, [%1, #24] \n\t"                                 \
4457         "ldr x3, [%1, #32] \n\t"                                 \
4458         "ldr x4, [%1, #40] \n\t"                                 \
4459         "ldr x5, [%1, #48] \n\t"                                 \
4460         "ldr x6, [%1, #56] \n\t"                                 \
4461         "ldr x7, [%1, #64] \n\t"                                 \
4462         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4463         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4464         VALGRIND_RESTORE_STACK                                   \
4465         "mov %0, x0"                                             \
4466         : /*out*/   "=r" (_res)                                  \
4467         : /*in*/    "0" (&_argvec[0])                            \
4468         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4469      );                                                          \
4470      lval = (__typeof__(lval)) _res;                             \
4471   } while (0)
4472
/* arm64: 9-arg call: args1-8 in x0-x7; arg9 stored at [sp, #0] in a
   32-byte scratch area (multiple of 16, so sp stays 16-byte aligned).
   x8 is used as scratch for the store before being loaded with the
   target address. */
4473#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
4474                     arg7,arg8,arg9)                              \
4475   do {                                                           \
4476      volatile OrigFn _orig = (orig);                             \
4477      volatile unsigned long _argvec[10];                         \
4478      volatile unsigned long _res;                                \
4479      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4480      _argvec[1] = (unsigned long)(arg1);                         \
4481      _argvec[2] = (unsigned long)(arg2);                         \
4482      _argvec[3] = (unsigned long)(arg3);                         \
4483      _argvec[4] = (unsigned long)(arg4);                         \
4484      _argvec[5] = (unsigned long)(arg5);                         \
4485      _argvec[6] = (unsigned long)(arg6);                         \
4486      _argvec[7] = (unsigned long)(arg7);                         \
4487      _argvec[8] = (unsigned long)(arg8);                         \
4488      _argvec[9] = (unsigned long)(arg9);                         \
4489      __asm__ volatile(                                           \
4490         VALGRIND_ALIGN_STACK                                     \
4491         "sub sp, sp, #0x20 \n\t"                                 \
4492         "ldr x0, [%1, #8] \n\t"                                  \
4493         "ldr x1, [%1, #16] \n\t"                                 \
4494         "ldr x2, [%1, #24] \n\t"                                 \
4495         "ldr x3, [%1, #32] \n\t"                                 \
4496         "ldr x4, [%1, #40] \n\t"                                 \
4497         "ldr x5, [%1, #48] \n\t"                                 \
4498         "ldr x6, [%1, #56] \n\t"                                 \
4499         "ldr x7, [%1, #64] \n\t"                                 \
4500         "ldr x8, [%1, #72] \n\t"                                 \
4501         "str x8, [sp, #0]  \n\t"                                 \
4502         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4503         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4504         VALGRIND_RESTORE_STACK                                   \
4505         "mov %0, x0"                                             \
4506         : /*out*/   "=r" (_res)                                  \
4507         : /*in*/    "0" (&_argvec[0])                            \
4508         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4509      );                                                          \
4510      lval = (__typeof__(lval)) _res;                             \
4511   } while (0)
4512
/* arm64: 10-arg call: args1-8 in x0-x7; args9-10 at [sp,#0]/[sp,#8] in
   a 32-byte scratch area (keeps sp 16-byte aligned); x8 is scratch for
   the stores, then holds the target. */
4513#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
4514                      arg7,arg8,arg9,arg10)                       \
4515   do {                                                           \
4516      volatile OrigFn _orig = (orig);                             \
4517      volatile unsigned long _argvec[11];                         \
4518      volatile unsigned long _res;                                \
4519      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4520      _argvec[1] = (unsigned long)(arg1);                         \
4521      _argvec[2] = (unsigned long)(arg2);                         \
4522      _argvec[3] = (unsigned long)(arg3);                         \
4523      _argvec[4] = (unsigned long)(arg4);                         \
4524      _argvec[5] = (unsigned long)(arg5);                         \
4525      _argvec[6] = (unsigned long)(arg6);                         \
4526      _argvec[7] = (unsigned long)(arg7);                         \
4527      _argvec[8] = (unsigned long)(arg8);                         \
4528      _argvec[9] = (unsigned long)(arg9);                         \
4529      _argvec[10] = (unsigned long)(arg10);                       \
4530      __asm__ volatile(                                           \
4531         VALGRIND_ALIGN_STACK                                     \
4532         "sub sp, sp, #0x20 \n\t"                                 \
4533         "ldr x0, [%1, #8] \n\t"                                  \
4534         "ldr x1, [%1, #16] \n\t"                                 \
4535         "ldr x2, [%1, #24] \n\t"                                 \
4536         "ldr x3, [%1, #32] \n\t"                                 \
4537         "ldr x4, [%1, #40] \n\t"                                 \
4538         "ldr x5, [%1, #48] \n\t"                                 \
4539         "ldr x6, [%1, #56] \n\t"                                 \
4540         "ldr x7, [%1, #64] \n\t"                                 \
4541         "ldr x8, [%1, #72] \n\t"                                 \
4542         "str x8, [sp, #0]  \n\t"                                 \
4543         "ldr x8, [%1, #80] \n\t"                                 \
4544         "str x8, [sp, #8]  \n\t"                                 \
4545         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4546         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4547         VALGRIND_RESTORE_STACK                                   \
4548         "mov %0, x0"                                             \
4549         : /*out*/   "=r" (_res)                                  \
4550         : /*in*/    "0" (&_argvec[0])                            \
4551         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4552      );                                                          \
4553      lval = (__typeof__(lval)) _res;                             \
4554   } while (0)
4555
/* arm64: 11-arg call: args1-8 in x0-x7; args9-11 at [sp,#0/8/16] in a
   48-byte scratch area (multiple of 16, preserving sp alignment); x8 is
   scratch for the stores, then holds the target. */
4556#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
4557                      arg7,arg8,arg9,arg10,arg11)                 \
4558   do {                                                           \
4559      volatile OrigFn _orig = (orig);                             \
4560      volatile unsigned long _argvec[12];                         \
4561      volatile unsigned long _res;                                \
4562      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4563      _argvec[1] = (unsigned long)(arg1);                         \
4564      _argvec[2] = (unsigned long)(arg2);                         \
4565      _argvec[3] = (unsigned long)(arg3);                         \
4566      _argvec[4] = (unsigned long)(arg4);                         \
4567      _argvec[5] = (unsigned long)(arg5);                         \
4568      _argvec[6] = (unsigned long)(arg6);                         \
4569      _argvec[7] = (unsigned long)(arg7);                         \
4570      _argvec[8] = (unsigned long)(arg8);                         \
4571      _argvec[9] = (unsigned long)(arg9);                         \
4572      _argvec[10] = (unsigned long)(arg10);                       \
4573      _argvec[11] = (unsigned long)(arg11);                       \
4574      __asm__ volatile(                                           \
4575         VALGRIND_ALIGN_STACK                                     \
4576         "sub sp, sp, #0x30 \n\t"                                 \
4577         "ldr x0, [%1, #8] \n\t"                                  \
4578         "ldr x1, [%1, #16] \n\t"                                 \
4579         "ldr x2, [%1, #24] \n\t"                                 \
4580         "ldr x3, [%1, #32] \n\t"                                 \
4581         "ldr x4, [%1, #40] \n\t"                                 \
4582         "ldr x5, [%1, #48] \n\t"                                 \
4583         "ldr x6, [%1, #56] \n\t"                                 \
4584         "ldr x7, [%1, #64] \n\t"                                 \
4585         "ldr x8, [%1, #72] \n\t"                                 \
4586         "str x8, [sp, #0]  \n\t"                                 \
4587         "ldr x8, [%1, #80] \n\t"                                 \
4588         "str x8, [sp, #8]  \n\t"                                 \
4589         "ldr x8, [%1, #88] \n\t"                                 \
4590         "str x8, [sp, #16] \n\t"                                 \
4591         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4592         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4593         VALGRIND_RESTORE_STACK                                   \
4594         "mov %0, x0"                                             \
4595         : /*out*/   "=r" (_res)                                  \
4596         : /*in*/    "0" (&_argvec[0])                            \
4597         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4598      );                                                          \
4599      lval = (__typeof__(lval)) _res;                             \
4600   } while (0)
4601
/* arm64: call a 12-word-arg function without Valgrind redirection.
   Same scheme as CALL_FN_W_11W: args 1-8 in x0-x7, args 9-12 spilled
   to [sp,#0..#24] inside a 0x30-byte outgoing-arg area; target address
   goes via x8, result returns in x0. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11, \
                      arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #0x30 \n\t" /* room for 4 stack args (32 bytes) */ \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x4, [%1, #40] \n\t" \
         "ldr x5, [%1, #48] \n\t" \
         "ldr x6, [%1, #56] \n\t" \
         "ldr x7, [%1, #64] \n\t" \
         "ldr x8, [%1, #72] \n\t" /* arg9 -> stack slot 0 */ \
         "str x8, [sp, #0] \n\t" \
         "ldr x8, [%1, #80] \n\t" /* arg10 -> stack slot 1 */ \
         "str x8, [sp, #8] \n\t" \
         "ldr x8, [%1, #88] \n\t" /* arg11 -> stack slot 2 */ \
         "str x8, [sp, #16] \n\t" \
         "ldr x8, [%1, #96] \n\t" /* arg12 -> stack slot 3 */ \
         "str x8, [sp, #24] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4651
4652#endif /* PLAT_arm64_linux */
4653
4654/* ------------------------- s390x-linux ------------------------- */
4655
4656#if defined(PLAT_s390x_linux)
4657
4658/* Similar workaround as amd64 (see above), but we use r11 as frame
4659 pointer and save the old r11 in r7. r11 might be used for
4660 argvec, therefore we copy argvec in r1 since r1 is clobbered
4661 after the call anyway. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
/* Extra asm input: the current DWARF CFA, consumed as %2 by the
   prologue so r11 can be pointed at the caller's frame for unwinding. */
# define __FRAME_POINTER \
      ,"d"(__builtin_dwarf_cfa())
/* Save/augment CFI state, stash argvec in r1 and the old frame
   pointer (r11) in r7, then make r11 the CFA so stack traces through
   the hidden call remain walkable. */
# define VALGRIND_CFI_PROLOGUE \
   ".cfi_remember_state\n\t" \
   "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
   "lgr 7,11\n\t" \
   "lgr 11,%2\n\t" \
   ".cfi_def_cfa r11, 0\n\t"
/* Restore r11 from r7 and pop the remembered CFI state. */
# define VALGRIND_CFI_EPILOGUE \
   "lgr 11, 7\n\t" \
   ".cfi_restore_state\n\t"
#else
/* No CFI support: still copy argvec into r1, but emit no unwind info. */
# define __FRAME_POINTER
# define VALGRIND_CFI_PROLOGUE \
   "lgr 1,%1\n\t"
# define VALGRIND_CFI_EPILOGUE
#endif
4680
4681/* Nb: On s390 the stack pointer is properly aligned *at all times*
4682 according to the s390 GCC maintainer. (The ABI specification is not
4683 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4684 VALGRIND_RESTORE_STACK are not defined here. */
4685
/* These regs are trashed by the hidden call. Note that we overwrite
   r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
   function a proper return address. All others are ABI defined call
   clobbers: r0-r5 plus the low FP registers f0-f7. */
#define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
                           "f0","f1","f2","f3","f4","f5","f6","f7"
4692
4693/* Nb: Although r11 is modified in the asm snippets below (inside
4694 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
4695 two reasons:
4696 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4697 modified
4698 (2) GCC will complain that r11 cannot appear inside a clobber section,
4699 when compiled with -O -fno-omit-frame-pointer
4700 */
4701
/* s390x: call a zero-arg function without redirection.  A 160-byte
   register save area (required by the s390x ELF ABI) is reserved
   below r15; the target address is loaded into r1 for
   VALGRIND_CALL_NOREDIR_R1 and the result is returned in r2. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 1, 0(1)\n\t" /* target->r1 */ \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4722
4723/* The call abi has the arguments in r2-r6 and stack */
/* The call abi has the arguments in r2-r6 and stack */
/* One arg: loaded into r2 from _argvec[1] before r1 is overwritten
   with the target address (hence args first, target last). */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4746
/* Two args: r2 and r3; result in r2. */
#define CALL_FN_W_WW(lval, orig, arg1, arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4771
/* Three args: r2-r4; result in r2. */
#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4798
/* Four args: r2-r5; result in r2. */
#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4827
/* Five args: r2-r6.  r6 is callee-saved in the ABI but used here as
   an argument register, so it is added to the clobber list. */
#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4858
/* Six args: r2-r6 plus one stack arg.  The frame grows to 168 bytes
   (160-byte save area + 8) and arg6 is copied with mvc into the
   outgoing parameter slot at 160(r15). */
#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                     arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-168\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,168\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4892
/* Seven args: r2-r6 plus two stack args at 160/168(r15); 176-byte frame. */
#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                     arg6, arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-176\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,176\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4928
/* Eight args: r2-r6 plus three stack args at 160..176(r15); 184-byte frame. */
#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                     arg6, arg7 ,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-184\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,184\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4966
/* Nine args: r2-r6 plus four stack args at 160..184(r15); 192-byte frame. */
#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                     arg6, arg7 ,arg8, arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-192\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "mvc 184(8,15), 72(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,192\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5006
/* Ten args: r2-r6 plus five stack args at 160..192(r15); 200-byte frame. */
#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                      arg6, arg7 ,arg8, arg9, arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-200\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "mvc 184(8,15), 72(1)\n\t" \
         "mvc 192(8,15), 80(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,200\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5048
/* Eleven args: r2-r6 plus six stack args at 160..200(r15); 208-byte frame. */
#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                      arg6, arg7 ,arg8, arg9, arg10, arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      _argvec[11] = (unsigned long)arg11; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-208\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "mvc 184(8,15), 72(1)\n\t" \
         "mvc 192(8,15), 80(1)\n\t" \
         "mvc 200(8,15), 88(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,208\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5092
/* Twelve args: r2-r6 plus seven stack args at 160..208(r15); 216-byte frame. */
#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                      arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      _argvec[11] = (unsigned long)arg11; \
      _argvec[12] = (unsigned long)arg12; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-216\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "mvc 184(8,15), 72(1)\n\t" \
         "mvc 192(8,15), 80(1)\n\t" \
         "mvc 200(8,15), 88(1)\n\t" \
         "mvc 208(8,15), 96(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,216\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5138
5139
5140#endif /* PLAT_s390x_linux */
5141
5142/* ------------------------- mips32-linux ----------------------- */
5143
5144#if defined(PLAT_mips32_linux)
5145
/* These regs are trashed by the hidden call: the o32 argument regs
   $4-$7 (a0-a3), temporaries $2-$3 and $8-$15, $24-$25 (t8/t9) and
   $31 (ra). */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
5150
5151/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5152 long) == 4. */
5153
/* mips32 (o32): zero-arg call.  $28 (gp) and $31 (ra) are saved in an
   8-byte scratch area, a 16-byte minimum outgoing-arg area is
   reserved, the target address goes via $25 (t9) as the PIC
   convention requires, and the result is read from $2 (v0). */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16\n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5178
/* One arg in $4 (a0); same save/restore scheme as CALL_FN_W_v. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $4, 4(%1) \n\t" /* arg1*/ \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5205
/* Two args in $4/$5 (a0/a1). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5234
/* Three args in $4-$6 (a0-a2). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5265
/* Four args in $4-$7 (a0-a3) — the last all-register case for o32. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5298
/* Five args: $4-$7 plus arg5 in the o32 stack slot at 16($sp).  arg5
   is staged through $4 before $4 is reloaded with arg1, so the spill
   happens before the register args are set up. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" /* arg5, staged via $4 */ \
         "subu $29, $29, 24\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 24 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Six args: $4-$7 plus args 5-6 at 16($sp)/20($sp) in a 32-byte frame. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 32\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "nop\n\t" /* kept from original; presumably a load-delay/scheduling workaround */ \
         "sw $4, 20($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 32 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5373
/* Seven args: $4-$7 plus args 5-7 at 16..24($sp) in a 32-byte frame. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 32\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 32 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5416
/* Eight args: $4-$7 plus args 5-8 at 16..28($sp) in a 40-byte frame. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 40\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 40 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5462
/* Nine args: $4-$7 plus args 5-9 at 16..32($sp) in a 40-byte frame. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 40\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" \
         "lw $4, 36(%1) \n\t" \
         "sw $4, 32($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 40 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5511
/* Ten args: $4-$7 plus args 5-10 at 16..36($sp) in a 48-byte frame. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 48\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" \
         "lw $4, 36(%1) \n\t" \
         "sw $4, 32($29) \n\t" \
         "lw $4, 40(%1) \n\t" \
         "sw $4, 36($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 48 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5563
/* Call an orig-fn taking 11 word args, on mips32-linux (o32 ABI).
   Args 1..4 in $4..$7; args 5..11 are copied to the stack.  gp/ra
   are saved/restored around the call; result comes back in $2. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
        "subu $29, $29, 8 \n\t"                                   \
        "sw $28, 0($29) \n\t"                                     \
        "sw $31, 4($29) \n\t"                                     \
        "lw $4, 20(%1) \n\t"                                      \
        "subu $29, $29, 48\n\t"                                   \
        "sw $4, 16($29) \n\t"                                     \
        "lw $4, 24(%1) \n\t"                                      \
        "sw $4, 20($29) \n\t"                                     \
        "lw $4, 28(%1) \n\t"                                      \
        "sw $4, 24($29) \n\t"                                     \
        "lw $4, 32(%1) \n\t"                                      \
        "sw $4, 28($29) \n\t"                                     \
        "lw $4, 36(%1) \n\t"                                      \
        "sw $4, 32($29) \n\t"                                     \
        "lw $4, 40(%1) \n\t"                                      \
        "sw $4, 36($29) \n\t"                                     \
        "lw $4, 44(%1) \n\t"                                      \
        "sw $4, 40($29) \n\t"                                     \
        "lw $4, 4(%1) \n\t"                                       \
        "lw $5, 8(%1) \n\t"                                       \
        "lw $6, 12(%1) \n\t"                                      \
        "lw $7, 16(%1) \n\t"                                      \
        "lw $25, 0(%1) \n\t"  /* target->t9 */                    \
        VALGRIND_CALL_NOREDIR_T9                                  \
        "addu $29, $29, 48 \n\t"                                  \
        "lw $28, 0($29) \n\t"                                     \
        "lw $31, 4($29) \n\t"                                     \
        "addu $29, $29, 8 \n\t"                                   \
        "move %0, $2\n"                                           \
        : /*out*/ "=r" (_res)                                     \
        : /*in*/ "0" (&_argvec[0])                                \
        : /*trash*/ "memory", __CALLER_SAVED_REGS                 \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5619
/* Call an orig-fn taking 12 word args, on mips32-linux (o32 ABI).
   Args 1..4 in $4..$7; args 5..12 are copied to a 56-byte on-stack
   area.  gp/ra are saved/restored; result comes back in $2.
   NOTE(review): this macro uses the plain "r" input constraint while
   the neighbouring CALL_FN_ macros use the matching "0" constraint --
   both work here (the result is written only after the last use of
   %1), but the inconsistency is worth confirming upstream. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11,arg12)                    \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
        "subu $29, $29, 8 \n\t"                                   \
        "sw $28, 0($29) \n\t"                                     \
        "sw $31, 4($29) \n\t"                                     \
        "lw $4, 20(%1) \n\t"                                      \
        "subu $29, $29, 56\n\t"                                   \
        "sw $4, 16($29) \n\t"                                     \
        "lw $4, 24(%1) \n\t"                                      \
        "sw $4, 20($29) \n\t"                                     \
        "lw $4, 28(%1) \n\t"                                      \
        "sw $4, 24($29) \n\t"                                     \
        "lw $4, 32(%1) \n\t"                                      \
        "sw $4, 28($29) \n\t"                                     \
        "lw $4, 36(%1) \n\t"                                      \
        "sw $4, 32($29) \n\t"                                     \
        "lw $4, 40(%1) \n\t"                                      \
        "sw $4, 36($29) \n\t"                                     \
        "lw $4, 44(%1) \n\t"                                      \
        "sw $4, 40($29) \n\t"                                     \
        "lw $4, 48(%1) \n\t"                                      \
        "sw $4, 44($29) \n\t"                                     \
        "lw $4, 4(%1) \n\t"                                       \
        "lw $5, 8(%1) \n\t"                                       \
        "lw $6, 12(%1) \n\t"                                      \
        "lw $7, 16(%1) \n\t"                                      \
        "lw $25, 0(%1) \n\t"  /* target->t9 */                    \
        VALGRIND_CALL_NOREDIR_T9                                  \
        "addu $29, $29, 56 \n\t"                                  \
        "lw $28, 0($29) \n\t"                                     \
        "lw $31, 4($29) \n\t"                                     \
        "addu $29, $29, 8 \n\t"                                   \
        "move %0, $2\n"                                           \
        : /*out*/ "=r" (_res)                                     \
        : /*in*/ "r" (&_argvec[0])                                \
        : /*trash*/ "memory", __CALLER_SAVED_REGS                 \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5678
5679#endif /* PLAT_mips32_linux */
5680
5681/* ------------------------- mips64-linux ------------------------- */
5682
5683#if defined(PLAT_mips64_linux)
5684
/* These regs are trashed by the hidden call.  ($2/$3 = return regs,
   $4..$11 = argument regs, $12..$15/$24/$25 = temporaries,
   $31 = return address.) */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",       \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"

/* These CALL_FN_ macros assume that on mips64-linux,
   sizeof(long long) == 8. */

/* Widen a word-sized value to its 64-bit register image: going via
   (long) first sign-extends 32-bit quantities rather than
   zero-extending them. */
#define MIPS64_LONG2REG_CAST(x) ((long long)(long)x)
5694
/* Call an orig-fn taking no args, on mips64-linux (n64 ABI).
   The target address goes in $25 (t9), as PIC calling conventions
   require; the result is taken from $2 (v0). */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[1];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      __asm__ volatile(                                           \
        "ld $25, 0(%1)\n\t"  /* target->t9 */                     \
        VALGRIND_CALL_NOREDIR_T9                                  \
        "move %0, $2\n"                                           \
        : /*out*/ "=r" (_res)                                     \
        : /*in*/ "0" (&_argvec[0])                                \
        : /*trash*/ "memory", __CALLER_SAVED_REGS                 \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5711
/* Call an orig-fn taking 1 word arg.  arg1 goes in $4 (a0); result
   is returned in $2 (v0). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[2];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      __asm__ volatile(                                           \
        "ld $4, 8(%1)\n\t"   /* arg1*/                            \
        "ld $25, 0(%1)\n\t"  /* target->t9 */                     \
        VALGRIND_CALL_NOREDIR_T9                                  \
        "move %0, $2\n"                                           \
        : /*out*/ "=r" (_res)                                     \
        : /*in*/ "r" (&_argvec[0])                                \
        : /*trash*/ "memory", __CALLER_SAVED_REGS                 \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5730
/* Call an orig-fn taking 2 word args.  Args go in $4/$5 (a0/a1);
   result is returned in $2 (v0). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[3];                     \
      volatile unsigned long long _res;                           \
      /* Cast nraddr through MIPS64_LONG2REG_CAST like every other \
         mips64 CALL_FN_ macro does (previously assigned raw,      \
         relying on the implicit conversion).                   */ \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      __asm__ volatile(                                           \
        "ld $4, 8(%1)\n\t"                                        \
        "ld $5, 16(%1)\n\t"                                       \
        "ld $25, 0(%1)\n\t"  /* target->t9 */                     \
        VALGRIND_CALL_NOREDIR_T9                                  \
        "move %0, $2\n"                                           \
        : /*out*/ "=r" (_res)                                     \
        : /*in*/ "r" (&_argvec[0])                                \
        : /*trash*/ "memory", __CALLER_SAVED_REGS                 \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5751
5752
/* Call an orig-fn taking 3 word args.  Args go in $4..$6; result
   is returned in $2 (v0). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[4];                     \
      volatile unsigned long long _res;                           \
      /* Cast nraddr through MIPS64_LONG2REG_CAST like every other \
         mips64 CALL_FN_ macro does (previously assigned raw,      \
         relying on the implicit conversion).                   */ \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      __asm__ volatile(                                           \
        "ld $4, 8(%1)\n\t"                                        \
        "ld $5, 16(%1)\n\t"                                       \
        "ld $6, 24(%1)\n\t"                                       \
        "ld $25, 0(%1)\n\t"  /* target->t9 */                     \
        VALGRIND_CALL_NOREDIR_T9                                  \
        "move %0, $2\n"                                           \
        : /*out*/ "=r" (_res)                                     \
        : /*in*/ "r" (&_argvec[0])                                \
        : /*trash*/ "memory", __CALLER_SAVED_REGS                 \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5775
/* Call an orig-fn taking 4 word args.  Args go in $4..$7; result
   is returned in $2 (v0). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[5];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      __asm__ volatile(                                           \
        "ld $4, 8(%1)\n\t"                                        \
        "ld $5, 16(%1)\n\t"                                       \
        "ld $6, 24(%1)\n\t"                                       \
        "ld $7, 32(%1)\n\t"                                       \
        "ld $25, 0(%1)\n\t"  /* target->t9 */                     \
        VALGRIND_CALL_NOREDIR_T9                                  \
        "move %0, $2\n"                                           \
        : /*out*/ "=r" (_res)                                     \
        : /*in*/ "r" (&_argvec[0])                                \
        : /*trash*/ "memory", __CALLER_SAVED_REGS                 \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5800
/* Call an orig-fn taking 5 word args.  Args go in $4..$8; result
   is returned in $2 (v0). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[6];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      __asm__ volatile(                                           \
        "ld $4, 8(%1)\n\t"                                        \
        "ld $5, 16(%1)\n\t"                                       \
        "ld $6, 24(%1)\n\t"                                       \
        "ld $7, 32(%1)\n\t"                                       \
        "ld $8, 40(%1)\n\t"                                       \
        "ld $25, 0(%1)\n\t"  /* target->t9 */                     \
        VALGRIND_CALL_NOREDIR_T9                                  \
        "move %0, $2\n"                                           \
        : /*out*/ "=r" (_res)                                     \
        : /*in*/ "r" (&_argvec[0])                                \
        : /*trash*/ "memory", __CALLER_SAVED_REGS                 \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5827
/* Call an orig-fn taking 6 word args.  Args go in $4..$9; result
   is returned in $2 (v0). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[7];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      __asm__ volatile(                                           \
        "ld $4, 8(%1)\n\t"                                        \
        "ld $5, 16(%1)\n\t"                                       \
        "ld $6, 24(%1)\n\t"                                       \
        "ld $7, 32(%1)\n\t"                                       \
        "ld $8, 40(%1)\n\t"                                       \
        "ld $9, 48(%1)\n\t"                                       \
        "ld $25, 0(%1)\n\t"  /* target->t9 */                     \
        VALGRIND_CALL_NOREDIR_T9                                  \
        "move %0, $2\n"                                           \
        : /*out*/ "=r" (_res)                                     \
        : /*in*/ "r" (&_argvec[0])                                \
        : /*trash*/ "memory", __CALLER_SAVED_REGS                 \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5856
/* Call an orig-fn taking 7 word args.  Args go in $4..$10; result
   is returned in $2 (v0). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[8];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      __asm__ volatile(                                           \
        "ld $4, 8(%1)\n\t"                                        \
        "ld $5, 16(%1)\n\t"                                       \
        "ld $6, 24(%1)\n\t"                                       \
        "ld $7, 32(%1)\n\t"                                       \
        "ld $8, 40(%1)\n\t"                                       \
        "ld $9, 48(%1)\n\t"                                       \
        "ld $10, 56(%1)\n\t"                                      \
        "ld $25, 0(%1) \n\t"  /* target->t9 */                    \
        VALGRIND_CALL_NOREDIR_T9                                  \
        "move %0, $2\n"                                           \
        : /*out*/ "=r" (_res)                                     \
        : /*in*/ "r" (&_argvec[0])                                \
        : /*trash*/ "memory", __CALLER_SAVED_REGS                 \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5888
/* Call an orig-fn taking 8 word args.  Args go in $4..$11 -- the
   full set of n64 argument registers; result is returned in $2. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[9];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      __asm__ volatile(                                           \
        "ld $4, 8(%1)\n\t"                                        \
        "ld $5, 16(%1)\n\t"                                       \
        "ld $6, 24(%1)\n\t"                                       \
        "ld $7, 32(%1)\n\t"                                       \
        "ld $8, 40(%1)\n\t"                                       \
        "ld $9, 48(%1)\n\t"                                       \
        "ld $10, 56(%1)\n\t"                                      \
        "ld $11, 64(%1)\n\t"                                      \
        "ld $25, 0(%1) \n\t"  /* target->t9 */                    \
        VALGRIND_CALL_NOREDIR_T9                                  \
        "move %0, $2\n"                                           \
        : /*out*/ "=r" (_res)                                     \
        : /*in*/ "r" (&_argvec[0])                                \
        : /*trash*/ "memory", __CALLER_SAVED_REGS                 \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5922
/* Call an orig-fn taking 9 word args.  Args 1..8 go in $4..$11;
   arg9 is passed on the stack.
   NOTE(review): only 8 bytes are reserved, which leaves $sp not
   16-byte aligned across the call -- confirm whether the n64 stack
   alignment rule matters for the targets this is used with. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[10];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      __asm__ volatile(                                           \
        "dsubu $29, $29, 8\n\t"                                   \
        "ld $4, 72(%1)\n\t"                                       \
        "sd $4, 0($29)\n\t"  /* arg9 -> stack */                  \
        "ld $4, 8(%1)\n\t"                                        \
        "ld $5, 16(%1)\n\t"                                       \
        "ld $6, 24(%1)\n\t"                                       \
        "ld $7, 32(%1)\n\t"                                       \
        "ld $8, 40(%1)\n\t"                                       \
        "ld $9, 48(%1)\n\t"                                       \
        "ld $10, 56(%1)\n\t"                                      \
        "ld $11, 64(%1)\n\t"                                      \
        "ld $25, 0(%1)\n\t"  /* target->t9 */                     \
        VALGRIND_CALL_NOREDIR_T9                                  \
        "daddu $29, $29, 8\n\t"                                   \
        "move %0, $2\n"                                           \
        : /*out*/ "=r" (_res)                                     \
        : /*in*/ "r" (&_argvec[0])                                \
        : /*trash*/ "memory", __CALLER_SAVED_REGS                 \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5961
/* Call an orig-fn taking 10 word args.  Args 1..8 go in $4..$11;
   args 9..10 are passed in a 16-byte on-stack area. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[11];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                  \
      __asm__ volatile(                                           \
        "dsubu $29, $29, 16\n\t"                                  \
        "ld $4, 72(%1)\n\t"                                       \
        "sd $4, 0($29)\n\t"  /* arg9 -> stack */                  \
        "ld $4, 80(%1)\n\t"                                       \
        "sd $4, 8($29)\n\t"  /* arg10 -> stack */                 \
        "ld $4, 8(%1)\n\t"                                        \
        "ld $5, 16(%1)\n\t"                                       \
        "ld $6, 24(%1)\n\t"                                       \
        "ld $7, 32(%1)\n\t"                                       \
        "ld $8, 40(%1)\n\t"                                       \
        "ld $9, 48(%1)\n\t"                                       \
        "ld $10, 56(%1)\n\t"                                      \
        "ld $11, 64(%1)\n\t"                                      \
        "ld $25, 0(%1)\n\t"  /* target->t9 */                     \
        VALGRIND_CALL_NOREDIR_T9                                  \
        "daddu $29, $29, 16\n\t"                                  \
        "move %0, $2\n"                                           \
        : /*out*/ "=r" (_res)                                     \
        : /*in*/ "r" (&_argvec[0])                                \
        : /*trash*/ "memory", __CALLER_SAVED_REGS                 \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6003
/* Call an orig-fn taking 11 word args.  Args 1..8 go in $4..$11;
   args 9..11 are passed in a 24-byte on-stack area.
   NOTE(review): 24 bytes leaves $sp not 16-byte aligned across the
   call -- confirm whether the n64 alignment rule matters here. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[12];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                  \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11);                  \
      __asm__ volatile(                                           \
        "dsubu $29, $29, 24\n\t"                                  \
        "ld $4, 72(%1)\n\t"                                       \
        "sd $4, 0($29)\n\t"   /* arg9  -> stack */                \
        "ld $4, 80(%1)\n\t"                                       \
        "sd $4, 8($29)\n\t"   /* arg10 -> stack */                \
        "ld $4, 88(%1)\n\t"                                       \
        "sd $4, 16($29)\n\t"  /* arg11 -> stack */                \
        "ld $4, 8(%1)\n\t"                                        \
        "ld $5, 16(%1)\n\t"                                       \
        "ld $6, 24(%1)\n\t"                                       \
        "ld $7, 32(%1)\n\t"                                       \
        "ld $8, 40(%1)\n\t"                                       \
        "ld $9, 48(%1)\n\t"                                       \
        "ld $10, 56(%1)\n\t"                                      \
        "ld $11, 64(%1)\n\t"                                      \
        "ld $25, 0(%1)\n\t"  /* target->t9 */                     \
        VALGRIND_CALL_NOREDIR_T9                                  \
        "daddu $29, $29, 24\n\t"                                  \
        "move %0, $2\n"                                           \
        : /*out*/ "=r" (_res)                                     \
        : /*in*/ "r" (&_argvec[0])                                \
        : /*trash*/ "memory", __CALLER_SAVED_REGS                 \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6049
/* Call an orig-fn taking 12 word args.  Args 1..8 go in $4..$11;
   args 9..12 are passed in a 32-byte on-stack area. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11,arg12)                    \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[13];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                  \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11);                  \
      _argvec[12] = MIPS64_LONG2REG_CAST(arg12);                  \
      __asm__ volatile(                                           \
        "dsubu $29, $29, 32\n\t"                                  \
        "ld $4, 72(%1)\n\t"                                       \
        "sd $4, 0($29)\n\t"   /* arg9  -> stack */                \
        "ld $4, 80(%1)\n\t"                                       \
        "sd $4, 8($29)\n\t"   /* arg10 -> stack */                \
        "ld $4, 88(%1)\n\t"                                       \
        "sd $4, 16($29)\n\t"  /* arg11 -> stack */                \
        "ld $4, 96(%1)\n\t"                                       \
        "sd $4, 24($29)\n\t"  /* arg12 -> stack */                \
        "ld $4, 8(%1)\n\t"                                        \
        "ld $5, 16(%1)\n\t"                                       \
        "ld $6, 24(%1)\n\t"                                       \
        "ld $7, 32(%1)\n\t"                                       \
        "ld $8, 40(%1)\n\t"                                       \
        "ld $9, 48(%1)\n\t"                                       \
        "ld $10, 56(%1)\n\t"                                      \
        "ld $11, 64(%1)\n\t"                                      \
        "ld $25, 0(%1)\n\t"  /* target->t9 */                     \
        VALGRIND_CALL_NOREDIR_T9                                  \
        "daddu $29, $29, 32\n\t"                                  \
        "move %0, $2\n"                                           \
        : /*out*/ "=r" (_res)                                     \
        : /*in*/ "r" (&_argvec[0])                                \
        : /*trash*/ "memory", __CALLER_SAVED_REGS                 \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6098
6099#endif /* PLAT_mips64_linux */
6100
6101/* ------------------------------------------------------------------ */
6102/* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
6103/* */
6104/* ------------------------------------------------------------------ */
6105
6106/* Some request codes. There are many more of these, but most are not
6107 exposed to end-user view. These are the public ones, all of the
6108 form 0x1000 + small_number.
6109
6110 Core ones are in the range 0x00000000--0x0000ffff. The non-public
6111 ones start at 0x2000.
6112*/
6113
/* These macros are used by tools -- they must be public, but don't
   embed them into other programs. */
/* Pack two identifying characters into the top 16 bits of a request
   code; a tool's own request numbers then occupy the low 16 bits. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
/* True iff request code 'v' belongs to the tool identified by the
   character pair (a,b) -- only the top 16 bits are compared. */
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
6120
6121/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
6122 This enum comprises an ABI exported by Valgrind to programs
6123 which use client requests. DO NOT CHANGE THE NUMERIC VALUES OF THESE
6124 ENTRIES, NOR DELETE ANY -- add new ones at the end of the most
6125 relevant group. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a monitor
             command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
          /* Out of numeric sequence: value 0x130b follows the 0x130a
             used below, since values are fixed ABI and new entries are
             appended to a group. */
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK   = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL   = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL  = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC    = 0x1305,
          VG_USERREQ__MEMPOOL_FREE     = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM     = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL     = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE   = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS   = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Some requests used for Valgrind internal, such as
             self-test or self-hosting. */
          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901,
          /* Used by Inner Valgrind to inform Outer Valgrind where to
             find the list of inner guest threads */
          VG_USERREQ__INNER_THREADS    = 0x1902
   } Vg_ClientRequest;
6203
/* Non-GNU compilers don't understand GCC's __extension__ keyword, so
   make it expand to nothing for them. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif
6207
6208
/* Returns the number of Valgrinds this code is running under.  That
   is, 0 if running natively, 1 if running under Valgrind, 2 if
   running under Valgrind which is running under another Valgrind,
   etc.  (Fixed: the final line previously ended with a stray '\'
   line-continuation, silently splicing the following blank line into
   the macro body.) */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)

6218
6219/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
6220 _qzz_len - 1]. Useful if you are debugging a JITter or some such,
6221 since it provides a way to make sure valgrind will retranslate the
6222 invalidated area. Returns no value. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS,  \
                                    _qzz_addr, _qzz_len, 0, 0, 0)

/* Inner-Valgrind support: tell the Outer Valgrind where to find this
   (Inner) Valgrind's list of guest threads.  Returns no value. */
#define VALGRIND_INNER_THREADS(_qzz_addr)                               \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS,           \
                                   _qzz_addr, 0, 0, 0, 0)
6230
6231
6232/* These requests are for getting Valgrind itself to print something.
6233 Possibly with a backtrace. This is a really ugly hack. The return value
6234 is the number of characters printed, excluding the "**<pid>** " part at the
6235 start and the backtrace (if present). */
6236
6237#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6238/* Modern GCC will optimize the static routine out if unused,
6239 and unused attribute will shut down warnings about it. */
6240static int VALGRIND_PRINTF(const char *format, ...)
6241 __attribute__((format(__printf__, 1, 2), __unused__));
6242#endif
6243static int
6244#if defined(_MSC_VER)
6245__inline
6246#endif
6247VALGRIND_PRINTF(const char *format, ...)
6248{
6249#if defined(NVALGRIND)
6250 (void)format;
6251 return 0;
6252#else /* NVALGRIND */
6253#if defined(_MSC_VER) || defined(__MINGW64__)
6254 uintptr_t _qzz_res;
6255#else
6256 unsigned long _qzz_res;
6257#endif
6258 va_list vargs;
6259 va_start(vargs, format);
6260#if defined(_MSC_VER) || defined(__MINGW64__)
6261 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6262 VG_USERREQ__PRINTF_VALIST_BY_REF,
6263 (uintptr_t)format,
6264 (uintptr_t)&vargs,
6265 0, 0, 0);
6266#else
6267 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6268 VG_USERREQ__PRINTF_VALIST_BY_REF,
6269 (unsigned long)format,
6270 (unsigned long)&vargs,
6271 0, 0, 0);
6272#endif
6273 va_end(vargs);
6274 return (int)_qzz_res;
6275#endif /* NVALGRIND */
6276}
6277
6278#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6279static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6280 __attribute__((format(__printf__, 1, 2), __unused__));
6281#endif
6282static int
6283#if defined(_MSC_VER)
6284__inline
6285#endif
6286VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6287{
6288#if defined(NVALGRIND)
6289 (void)format;
6290 return 0;
6291#else /* NVALGRIND */
6292#if defined(_MSC_VER) || defined(__MINGW64__)
6293 uintptr_t _qzz_res;
6294#else
6295 unsigned long _qzz_res;
6296#endif
6297 va_list vargs;
6298 va_start(vargs, format);
6299#if defined(_MSC_VER) || defined(__MINGW64__)
6300 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6301 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6302 (uintptr_t)format,
6303 (uintptr_t)&vargs,
6304 0, 0, 0);
6305#else
6306 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6307 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6308 (unsigned long)format,
6309 (unsigned long)&vargs,
6310 0, 0, 0);
6311#endif
6312 va_end(vargs);
6313 return (int)_qzz_res;
6314#endif /* NVALGRIND */
6315}
6316
6317
6318/* These requests allow control to move from the simulated CPU to the
6319 real CPU, calling an arbitrary function.
6320
6321 Note that the current ThreadId is inserted as the first argument.
6322 So this call:
6323
6324 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
6325
6326 requires f to have this signature:
6327
6328 Word f(Word tid, Word arg1, Word arg2)
6329
6330 where "Word" is a word-sized type.
6331
6332 Note that these client requests are not entirely reliable. For example,
6333 if you call a function with them that subsequently calls printf(),
6334 there's a high chance Valgrind will crash. Generally, your prospects of
6335 these working are made higher if the called function does not refer to
6336 any global variables, and does not refer to any libc or other functions
6337 (printf et al). Any kind of entanglement with libc or dynamic linking is
6338 likely to have a bad outcome, for tricky reasons which we've grappled
6339 with a lot in the past.
6340*/
/* Run _qyy_fn(tid) on the real CPU; see the comment above for the
   implicit ThreadId first argument and reliability caveats. */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

/* Run _qyy_fn(tid, _qyy_arg1) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)

/* Run _qyy_fn(tid, _qyy_arg1, _qyy_arg2) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

/* Run _qyy_fn(tid, _qyy_arg1, _qyy_arg2, _qyy_arg3) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,             \
                                    VG_USERREQ__CLIENT_CALL3,           \
                                    _qyy_fn,                            \
                                    _qyy_arg1, _qyy_arg2,               \
                                    _qyy_arg3, 0)
6365
6366
/* Counts the number of errors that have been recorded by a tool.  Nb:
   the tool must record the errors with VG_(maybe_record_error)() or
   VG_(unique_error)() for them to be counted.  Evaluates to 0 when
   not running under Valgrind (the client request's default). */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
6375
6376/* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
6377 when heap blocks are allocated in order to give accurate results. This
6378 happens automatically for the standard allocator functions such as
6379 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
6380 delete[], etc.
6381
6382 But if your program uses a custom allocator, this doesn't automatically
6383 happen, and Valgrind will not do as well. For example, if you allocate
 superblocks with mmap() and then allocate chunks of the superblocks, all
6385 Valgrind's observations will be at the mmap() level and it won't know that
6386 the chunks should be considered separate entities. In Memcheck's case,
6387 that means you probably won't get heap block overrun detection (because
6388 there won't be redzones marked as unaddressable) and you definitely won't
6389 get any leak detection.
6390
6391 The following client requests allow a custom allocator to be annotated so
6392 that it can be handled accurately by Valgrind.
6393
6394 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
6395 by a malloc()-like function. For Memcheck (an illustrative case), this
6396 does two things:
6397
6398 - It records that the block has been allocated. This means any addresses
6399 within the block mentioned in error messages will be
6400 identified as belonging to the block. It also means that if the block
6401 isn't freed it will be detected by the leak checker.
6402
6403 - It marks the block as being addressable and undefined (if 'is_zeroed' is
6404 not set), or addressable and defined (if 'is_zeroed' is set). This
6405 controls how accesses to the block by the program are handled.
6406
6407 'addr' is the start of the usable block (ie. after any
6408 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
6409 can apply redzones -- these are blocks of padding at the start and end of
6410 each block. Adding redzones is recommended as it makes it much more likely
6411 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
6412 zeroed (or filled with another predictable value), as is the case for
6413 calloc().
6414
6415 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
6416 heap block -- that will be used by the client program -- is allocated.
6417 It's best to put it at the outermost level of the allocator if possible;
6418 for example, if you have a function my_alloc() which calls
6419 internal_alloc(), and the client request is put inside internal_alloc(),
6420 stack traces relating to the heap block will contain entries for both
6421 my_alloc() and internal_alloc(), which is probably not what you want.
6422
6423 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
6424 custom blocks from within a heap block, B, that has been allocated with
6425 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
6426 -- the custom blocks will take precedence.
6427
6428 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
6429 Memcheck, it does two things:
6430
6431 - It records that the block has been deallocated. This assumes that the
6432 block was annotated as having been allocated via
6433 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6434
6435 - It marks the block as being unaddressable.
6436
6437 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
6438 heap block is deallocated.
6439
6440 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
6441 Memcheck, it does four things:
6442
6443 - It records that the size of a block has been changed. This assumes that
6444 the block was annotated as having been allocated via
6445 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6446
6447 - If the block shrunk, it marks the freed memory as being unaddressable.
6448
6449 - If the block grew, it marks the new area as undefined and defines a red
6450 zone past the end of the new block.
6451
6452 - The V-bits of the overlap between the old and the new block are preserved.
6453
6454 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
6455 and before deallocation of the old block.
6456
6457 In many cases, these three client requests will not be enough to get your
6458 allocator working well with Memcheck. More specifically, if your allocator
6459 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
6460 will be necessary to mark the memory as addressable just before the zeroing
6461 occurs, otherwise you'll get a lot of invalid write errors. For example,
6462 you'll need to do this if your allocator recycles freed blocks, but it
6463 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
6464 Alternatively, if your allocator reuses freed blocks for allocator-internal
6465 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
6466
6467 Really, what's happening is a blurring of the lines between the client
6468 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
6469 memory should be considered unaddressable to the client program, but the
6470 allocator knows more than the rest of the client program and so may be able
6471 to safely access it. Extra client requests are necessary for Valgrind to
6472 understand the distinction between the allocator and the rest of the
6473 program.
6474
6475 Ignored if addr == 0.
6476*/
/* Mark [addr, addr+sizeB) as a heap block allocated by a malloc()-like
   function.  See the big comment above for the full contract: 'rzB' is
   the allocator's redzone size (0 if none) and 'is_zeroed' is nonzero
   if the block's contents are defined on allocation (as for calloc).
   Ignored if addr == 0. */
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)          \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK,        \
                                   addr, sizeB, rzB, is_zeroed, 0)
6480
/* Notify the tool that the block at 'addr' changed size in place from
   'oldSizeB' to 'newSizeB' bytes; 'rzB' is the redzone size.  See the
   comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0. */
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)     \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK,     \
                                   addr, oldSizeB, newSizeB, rzB, 0)
6487
/* Notify the tool that the block at 'addr' has been deallocated;
   'rzB' is the redzone size.  See the comment for
   VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0. */
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                              \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,          \
                                   addr, rzB, 0, 0, 0)
6494
/* Create a memory pool anchored at address 'pool'.  'rzB' and
   'is_zeroed' have the same meanings as for
   VALGRIND_MALLOCLIKE_BLOCK. */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)                   \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,          \
                                   pool, rzB, is_zeroed, 0, 0)
6499
/* Create a memory pool with some flags specifying extended behaviour.
   When flags is zero, the behaviour is identical to
   VALGRIND_CREATE_MEMPOOL.

   The flag VALGRIND_MEMPOOL_METAPOOL specifies that the pieces of
   memory associated with the pool using VALGRIND_MEMPOOL_ALLOC will be
   used by the application as superblocks to dole out MALLOC_LIKE
   blocks using VALGRIND_MALLOCLIKE_BLOCK.  In other words, a metapool
   is a two-level pool: the first level is the set of blocks described
   by VALGRIND_MEMPOOL_ALLOC; the second-level blocks are described
   using VALGRIND_MALLOCLIKE_BLOCK.  Note that the association between
   the pool and the second-level blocks is implicit: second-level
   blocks will be located inside first-level blocks.  It is necessary
   to use the VALGRIND_MEMPOOL_METAPOOL flag for such two-level pools,
   as otherwise valgrind will detect overlapping memory blocks and
   will abort execution (e.g. during leak search).

   Such a metapool can also be marked as an 'auto free' pool using the
   flag VALGRIND_MEMPOOL_AUTO_FREE, which must be OR-ed together with
   VALGRIND_MEMPOOL_METAPOOL.  For an 'auto free' pool,
   VALGRIND_MEMPOOL_FREE will automatically free the second-level
   blocks that are contained inside the first-level block freed with
   VALGRIND_MEMPOOL_FREE.  In other words, calling
   VALGRIND_MEMPOOL_FREE will cause implicit calls to
   VALGRIND_FREELIKE_BLOCK for all the second-level blocks included in
   the first-level block.
   Note: it is an error to use the VALGRIND_MEMPOOL_AUTO_FREE flag
   without the VALGRIND_MEMPOOL_METAPOOL flag. */
#define VALGRIND_MEMPOOL_AUTO_FREE  1
#define VALGRIND_MEMPOOL_METAPOOL   2
#define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags)        \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,          \
                                   pool, rzB, is_zeroed, flags, 0)
6531
/* Destroy a memory pool previously created with
   VALGRIND_CREATE_MEMPOOL (or VALGRIND_CREATE_MEMPOOL_EXT). */
#define VALGRIND_DESTROY_MEMPOOL(pool)                                  \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,         \
                                   pool, 0, 0, 0, 0)
6536
/* Associate the piece of memory [addr, addr+size) with the pool. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                        \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,           \
                                   pool, addr, size, 0, 0)
6541
/* Disassociate the piece of memory starting at 'addr' from the
   pool. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                               \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,            \
                                   pool, addr, 0, 0, 0)
6546
/* Disassociate from the pool any pieces lying outside the range
   [addr, addr+size). */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                         \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,            \
                                   pool, addr, size, 0, 0)
6551
/* Move a memory pool: tells the tool that the pool previously
   anchored at address poolA is now anchored at address poolB.
   (The previous comment here -- "Resize and/or move a piece
   associated with a memory pool" -- was a copy of
   VALGRIND_MEMPOOL_CHANGE's and did not describe this request.) */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                             \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,            \
                                   poolA, poolB, 0, 0, 0)
6556
/* Resize and/or move a piece associated with a memory pool: the piece
   formerly at 'addrA' is now at 'addrB' with the given 'size'. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)               \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,          \
                                   pool, addrA, addrB, size, 0)
6561
/* Return 1 if a mempool anchored at 'pool' exists, else 0.  (Also 0
   when not running under Valgrind: 0 is the default result passed to
   the request.) */
#define VALGRIND_MEMPOOL_EXISTS(pool)                                   \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                               VG_USERREQ__MEMPOOL_EXISTS,              \
                               pool, 0, 0, 0, 0)
6567
/* Mark a piece of memory as being a stack.  Returns a stack id.
   'start' is the lowest addressable stack byte, 'end' is the highest
   addressable stack byte.  (Returns 0 when not running under
   Valgrind: 0 is the default result passed to the request.) */
#define VALGRIND_STACK_REGISTER(start, end)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                               VG_USERREQ__STACK_REGISTER,              \
                               start, end, 0, 0, 0)
6575
/* Unmark the piece of memory associated with stack id 'id' (as
   returned by VALGRIND_STACK_REGISTER) as being a stack. */
#define VALGRIND_STACK_DEREGISTER(id)                                   \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER,        \
                                   id, 0, 0, 0, 0)
6581
/* Change the start and end address of the stack id.
   'start' is the new lowest addressable stack byte, 'end' is the new
   highest addressable stack byte. */
#define VALGRIND_STACK_CHANGE(id, start, end)                           \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,            \
                                   id, start, end, 0, 0)
6588
/* Load PDB debug info for Wine PE image_map.  (NOTE(review): by the
   parameter names, 'fd' is a file descriptor for the PDB, 'ptr' and
   'total_size' describe the mapped image, and 'delta' is a load
   offset -- not documented here; confirm against the Valgrind
   sources before relying on this.) */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)         \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO,      \
                                   fd, ptr, total_size, delta, 0)
6593
/* Map a code address to a source file name and line number.  'buf64'
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                          \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,            \
                               addr, buf64, 0, 0, 0)
6602
/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled.
   (Implemented by adding 1 to the thread's error-disablement
   count.) */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT,  \
                                   1, 0, 0, 0, 0)
6614
/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING.  (Subtracts 1 from the thread's
   error-disablement count.) */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT,  \
                                   -1, 0, 0, 0, 0)
6620
/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Returns 1 if command not recognised, 0 otherwise.  (Note: unlike
   most of the macros above this one is an expression, not a
   statement; when not running under Valgrind the result is the
   default, 0.) */
#define VALGRIND_MONITOR_COMMAND(command)                               \
   VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND,  \
                                   command, 0, 0, 0, 0)
6629
6630
/* Undefine the platform-selection macros defined near the top of this
   header: they are internal to valgrind.h and must not leak into code
   that includes it. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris
6646
6647#endif /* __VALGRIND_H */
6648

source code of qtbase/src/testlib/3rdparty/valgrind_p.h