EPICS Base  7.0.6.1
 All Classes Files Functions Variables Typedefs Enumerations Enumerator Macros Groups Pages
valgrind.h
1 /* -*- c -*-
2  ----------------------------------------------------------------
3 
4  Notice that the following BSD-style license applies to this one
5  file (valgrind.h) only. The rest of Valgrind is licensed under the
6  terms of the GNU General Public License, version 2, unless
7  otherwise indicated. See the COPYING file in the source
8  distribution for details.
9 
10  ----------------------------------------------------------------
11 
12  This file is part of Valgrind, a dynamic binary instrumentation
13  framework.
14 
15  Copyright (C) 2000-2013 Julian Seward. All rights reserved.
16 
17  Redistribution and use in source and binary forms, with or without
18  modification, are permitted provided that the following conditions
19  are met:
20 
21  1. Redistributions of source code must retain the above copyright
22  notice, this list of conditions and the following disclaimer.
23 
24  2. The origin of this software must not be misrepresented; you must
25  not claim that you wrote the original software. If you use this
26  software in a product, an acknowledgment in the product
27  documentation would be appreciated but is not required.
28 
29  3. Altered source versions must be plainly marked as such, and must
30  not be misrepresented as being the original software.
31 
32  4. The name of the author may not be used to endorse or promote
33  products derived from this software without specific prior written
34  permission.
35 
36  THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37  OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39  ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40  DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41  DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42  GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43  INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44  WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45  NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
47 
48  ----------------------------------------------------------------
49 
50  Notice that the above BSD-style license applies to this one file
51  (valgrind.h) only. The entire rest of Valgrind is licensed under
52  the terms of the GNU General Public License, version 2. See the
53  COPYING file in the source distribution for details.
54 
55  ----------------------------------------------------------------
56 */
57 
58 
59 /* This file is for inclusion into client (your!) code.
60 
61  You can use these macros to manipulate and query Valgrind's
62  execution inside your own programs.
63 
64  The resulting executables will still run without Valgrind, just a
65  little bit more slowly than they otherwise would, but otherwise
66  unchanged. When not running on valgrind, each client request
67  consumes very few (eg. 7) instructions, so the resulting performance
68  loss is negligible unless you plan to execute client requests
69  millions of times per second. Nevertheless, if that is still a
70  problem, you can compile with the NVALGRIND symbol defined (gcc
71  -DNVALGRIND) so that client requests are not even compiled in. */
72 
#ifndef __VALGRIND_H
#define __VALGRIND_H


/* ------------------------------------------------------------------ */
/* VERSION NUMBER OF VALGRIND                                         */
/* ------------------------------------------------------------------ */

/* Specify Valgrind's version number, so that user code can
   conditionally compile based on our version number.  Note that these
   were introduced at version 3.6 and so do not exist in version 3.5
   or earlier.  The recommended way to use them to check for "version
   X.Y or later" is (eg)

#if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__)   \
    && (__VALGRIND_MAJOR__ > 3                                   \
        || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
/* This vendored copy corresponds to Valgrind 3.10. */
#define __VALGRIND_MAJOR__    3
#define __VALGRIND_MINOR__    10
93 
94 
95 #include <stdarg.h>
96 
97 /* Nb: this file might be included in a file compiled with -ansi. So
98  we can't use C++ style "//" comments nor the "asm" keyword (instead
99  use "__asm__"). */
100 
101 /* Derive some tags indicating what the target platform is. Note
102  that in this file we're using the compiler's CPP symbols for
103  identifying architectures, which are different to the ones we use
104  within the rest of Valgrind. Note, __powerpc__ is active for both
105  32 and 64-bit PPC, whereas __powerpc64__ is only active for the
106  latter (on Linux, that is).
107 
108  Misc note: how to find out what's predefined in gcc by default:
109  gcc -Wp,-dM somefile.c
110 */
/* After this chain, at most ONE PLAT_* tag is defined; on any target
   not recognised below, NVALGRIND is forced on so that no inline asm
   is emitted at all.  The detection order is load-bearing — do not
   reorder the branches. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux


#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif (defined(__MINGW32__) && !defined(__MINGW64__)) \
      || defined(__CYGWIN32__) \
      || (defined(_WIN32) && defined(_M_IX86) && defined(__GNUC__))
#  define PLAT_x86_win32 1
#elif defined(__MINGW64__) \
      || (defined(_WIN64) && defined(_M_X64) && defined(__GNUC__))
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__)
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
/* Big Endian uses ELF version 1.  NB: if _CALL_ELF is undefined it
   evaluates to 0 here, which still selects the BE branch. */
#  define PLAT_ppc64be_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
/* Little Endian uses ELF version 2 */
#  define PLAT_ppc64le_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
#  define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips!=64)
#  define PLAT_mips32_linux 1
#else
/* If we're not compiling for our target platform, don't generate
   any inline asms. */
#  if !defined(NVALGRIND)
#    define NVALGRIND 1
#  endif
#endif
167 
168 
169 /* ------------------------------------------------------------------ */
170 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
171 /* in here of use to end-users -- skip to the next section. */
172 /* ------------------------------------------------------------------ */
173 
174 /*
175  * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
176  * request. Accepts both pointers and integers as arguments.
177  *
178  * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
179  * client request that does not return a value.
180 
181  * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
182  * client request and whose value equals the client request result. Accepts
183  * both pointers and integers as arguments. Note that such calls are not
184  * necessarily pure functions -- they may have side effects.
185  */
186 
/* Statement form: stores the client-request result into the lvalue
   _zzq_rlval (or _zzq_default when not running under Valgrind). */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

/* Statement form that discards the result; the default value passed to
   the underlying _EXPR is 0 and the result is cast to void. */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                           _zzq_arg2,  _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
199 
#if defined(NVALGRIND)

/* Define NVALGRIND to completely remove the Valgrind magic sequence
   from the compiled code (analogous to NDEBUG's effects on
   assert()).  In that case every client request simply evaluates to
   its caller-supplied default value; the request and argument
   expressions are not evaluated at all. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
      (_zzq_default)
209 
210 #else /* ! NVALGRIND */
211 
212 /* The following defines the magic code sequences which the JITter
213  spots and handles magically. Don't look too closely at them as
214  they will rot your brain.
215 
216  The assembly code sequences for all architectures is in this one
217  file. This is because this file must be stand-alone, and we don't
218  want to have multiple files.
219 
220  For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
221  value gets put in the return slot, so that everything works when
222  this is executed not under Valgrind. Args are passed in a memory
223  block, and so there's no intrinsic limit to the number that could
224  be passed, but it's currently five.
225 
226  The macro args are:
227  _zzq_rlval result lvalue
228  _zzq_default default value (result returned when running on real CPU)
229  _zzq_request request code
230  _zzq_arg1..5 request params
231 
232  The other two macros are used to support function wrapping, and are
233  a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
234  guest's NRADDR pseudo-register and whatever other information is
235  needed to safely run the call original from the wrapper: on
236  ppc64-linux, the R2 value at the divert point is also needed. This
237  information is abstracted into a user-visible type, OrigFn.
238 
239  VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
240  guest, but guarantees that the branch instruction will not be
241  redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
242  branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
243  complete inline asm, since it needs to be combined with more magic
244  inline asm stuff to be useful.
245 */
246 
247 /* ------------------------- x86-{linux,darwin} ---------------- */
248 
#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)  \
    ||  (defined(PLAT_x86_win32) && defined(__GNUC__))

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* The rotations sum to 3+13+29+19 = 64, a multiple of 32, so on a real
   CPU this leaves %edi unchanged; Valgrind's JIT recognises the exact
   sequence as the client-request marker.  Do not alter it. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3, %%edi ; roll $13, %%edi\n\t"       \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

/* Pack the request code and five args into a block, point %eax at it,
   preload %edx with the default, and execute the magic marker.  Under
   Valgrind, %edx comes back holding the request result; on a real CPU
   the asm is a no-op and the preloaded default is returned. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

/* Fetch the guest NRADDR pseudo-register into _zzq_rlval.nraddr
   (used by the function-wrapping machinery). */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment only — must be embedded in a larger asm; performs a
   non-redirected call through %eax. */
#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                   "xchgl %%edi,%%edi\n\t"                        \
                   : : : "cc", "memory"                           \
                  );                                              \
 } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */
311 
312 /* ------------------------- x86-Win32 ------------------------- */
313 
#if defined(PLAT_x86_win32) && !defined(__GNUC__)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

/* Same no-op rotate marker as the GCC variant (3+13+29+19 = 64, a
   multiple of 32, so edi is unchanged), expressed in MSVC inline-asm
   syntax. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     __asm rol edi, 3  __asm rol edi, 13          \
                     __asm rol edi, 29 __asm rol edi, 19

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                                \
        _zzq_default, _zzq_request,                                     \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)          \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default),          \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1),              \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3),                 \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

/* Out-of-line helper: MSVC inline asm is statement-level only, so the
   request cannot be written as a ({...}) expression; it is routed
   through this static function instead. */
static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    /* eax = &args, edx = default; under Valgrind edx returns the
       result, otherwise the preloaded default survives. */
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                             \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                       \
      volatile unsigned int __addr;                                     \
      __asm { __SPECIAL_INSTRUCTION_PREAMBLE                            \
              /* %EAX = guest_NRADDR */                                 \
              __asm xchg ecx,ecx                                        \
              __asm mov __addr, eax                                     \
      }                                                                 \
      _zzq_orig->nraddr = __addr;                                       \
    }

/* Deliberately unsupported under MSVC: expands to a bare ERROR token
   so any use fails to compile. */
#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR()                                        \
    do {                                                                \
        __asm { __SPECIAL_INSTRUCTION_PREAMBLE                          \
                __asm xchg edi,edi                                      \
        }                                                               \
    } while (0)

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */
384 
385 /* ------------------------ amd64-{linux,darwin} --------------- */
386 
#if defined(PLAT_amd64_linux)  ||  defined(PLAT_amd64_darwin) \
    ||  (defined(PLAT_amd64_win64) && defined(__GNUC__))

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;

/* The rotations sum to 3+13+61+51 = 128, a multiple of 64, so on a
   real CPU %rdi is unchanged; Valgrind's JIT recognises the exact
   sequence as the client-request marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3, %%rdi ; rolq $13, %%rdi\n\t"       \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

/* 64-bit analogue of the x86 version: args block in %rax, default
   preloaded into %rdx, result read back from %rdx. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    __extension__                                                 \
    ({ volatile unsigned long long int _zzq_args[6];              \
    volatile unsigned long long int _zzq_result;                  \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RDX = client_request ( %RAX ) */         \
                     "xchgq %%rbx,%%rbx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long long int __addr;                       \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RAX = guest_NRADDR */                    \
                     "xchgq %%rcx,%%rcx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment only — must be embedded in a larger asm; performs a
   non-redirected call through %rax. */
#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                   "xchgq %%rdi,%%rdi\n\t"                        \
                   : : : "cc", "memory"                           \
                  );                                              \
 } while (0)

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || (PLAT_amd64_win64 && __GNUC__) */
449 
450 /* ------------------------- amd64-Win64 ------------------------- */
451 
#if defined(PLAT_amd64_win64) && !defined(__GNUC__)

/* 64-bit Windows with a non-GNU compiler is not supported by this
   header (no inline-asm route for the magic sequence). */
#error Unsupported compiler.

#endif /* PLAT_amd64_win64 */
457 
458 /* ------------------------ ppc32-linux ------------------------ */
459 
#if defined(PLAT_ppc32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Full-width rotates of r0 into r0; rotations sum to 3+13+29+19 = 64,
   a multiple of 32, so r0 is unchanged on a real CPU.  Valgrind's JIT
   recognises the exact sequence as the client-request marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
                     "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

/* Default goes in r3, args pointer in r4; under Valgrind r3 comes
   back holding the request result. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({ unsigned int _zzq_args[6];                                   \
     unsigned int _zzq_result;                                    \
     unsigned int* _zzq_ptr;                                      \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment only — must be embedded in a larger asm. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                   "or 5,5,5\n\t"                                 \
                  );                                              \
 } while (0)

#endif /* PLAT_ppc32_linux */
526 
527 /* ------------------------ ppc64-linux ------------------------ */
528 
#if defined(PLAT_ppc64be_linux)

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
      unsigned long long int r2;  /* what tocptr do we need? */
   }
   OrigFn;

/* 64-bit rotates of r0 into r0; rotations sum to 3+13+61+51 = 128, a
   multiple of 64, so r0 is unchanged on a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

/* Default in r3, args pointer in r4; r3 returns the result. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({ unsigned long long int _zzq_args[6];                         \
     unsigned long long int _zzq_result;                          \
     unsigned long long int* _zzq_ptr;                            \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

/* On ppc64 the wrapper machinery also needs the R2 (TOC pointer)
   value at the divert point, hence the second asm. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long long int __addr;                                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                   "or 5,5,5\n\t"                                 \
                  );                                              \
 } while (0)

#endif /* PLAT_ppc64be_linux */
605 
#if defined(PLAT_ppc64le_linux)

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
      unsigned long long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

/* Same marker as ppc64be: rotations sum to 128, a multiple of 64. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({ unsigned long long int _zzq_args[6];                         \
     unsigned long long int _zzq_result;                          \
     unsigned long long int* _zzq_ptr;                            \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

/* Also captures R2 (TOC pointer), as on ppc64be. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long long int __addr;                                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

/* ELFv2 (little-endian) uses R12 as the function entry register,
   hence R12 here rather than the R11 used on ppc64be. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R12 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                   "or 5,5,5\n\t"                                 \
                  );                                              \
 } while (0)

#endif /* PLAT_ppc64le_linux */
682 
683 /* ------------------------- arm-linux ------------------------- */
684 
#if defined(PLAT_arm_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotations of r12 sum to 3+13+29+19 = 64, a multiple of 32, so r12
   is unchanged on a real CPU; Valgrind's JIT recognises the exact
   sequence as the client-request marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t"   \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

/* Default in r3, args pointer in r4; r3 returns the result. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("mov r3, %1\n\t" /*default*/                 \
                     "mov r4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = client_request ( R4 ) */             \
                     "orr r10, r10, r10\n\t"                      \
                     "mov %0, r3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "r3", "r4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = guest_NRADDR */                      \
                     "orr r11, r11, r11\n\t"                      \
                     "mov %0, r3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment only — must be embedded in a larger asm. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                   "orr r9, r9, r9\n\t"                           \
                   : : : "cc", "memory"                           \
                  );                                              \
 } while (0)

#endif /* PLAT_arm_linux */
750 
751 /* ------------------------ arm64-linux ------------------------- */
752 
#if defined(PLAT_arm64_linux)

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotations of x12 sum to 3+13+51+61 = 128, a multiple of 64, so x12
   is unchanged on a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "ror x12, x12, #3  ;  ror x12, x12, #13 \n\t"         \
            "ror x12, x12, #51 ;  ror x12, x12, #61 \n\t"

/* Default in x3, args pointer in x4; x3 returns the result. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned long long int _zzq_args[6];                 \
    volatile unsigned long long int _zzq_result;                  \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
    __asm__ volatile("mov x3, %1\n\t" /*default*/                 \
                     "mov x4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = client_request ( X4 ) */             \
                     "orr x10, x10, x10\n\t"                      \
                     "mov %0, x3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "x3", "x4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long long int __addr;                                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = guest_NRADDR */                      \
                     "orr x11, x11, x11\n\t"                      \
                     "mov %0, x3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "x3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment only — must be embedded in a larger asm. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir X8 */          \
                     "orr x12, x12, x12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                   "orr x9, x9, x9\n\t"                           \
                   : : : "cc", "memory"                           \
                  );                                              \
 } while (0)

#endif /* PLAT_arm64_linux */
818 
819 /* ------------------------ s390x-linux ------------------------ */
820 
#if defined(PLAT_s390x_linux)

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;

/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code. This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_decoder.c).
 * Each "lr N,N" copies a register onto itself — an architectural no-op
 * on real hardware.
 */
#define __SPECIAL_INSTRUCTION_PREAMBLE                           \
                     "lr 15,15\n\t"                              \
                     "lr 1,1\n\t"                                \
                     "lr 2,2\n\t"                                \
                     "lr 3,3\n\t"

/* The no-op following the preamble selects which operation Valgrind
   performs. */
#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"

/* Args pointer in r2, default in r3; r3 returns the result. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                         \
       _zzq_default, _zzq_request,                               \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                  \
 ({volatile unsigned long long int _zzq_args[6];                 \
   volatile unsigned long long int _zzq_result;                  \
   _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
   _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
   _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
   _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
   _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
   _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
   __asm__ volatile(/* r2 = args */                              \
                    "lgr 2,%1\n\t"                               \
                    /* r3 = default */                           \
                    "lgr 3,%2\n\t"                               \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CLIENT_REQUEST_CODE                        \
                    /* results = r3 */                           \
                    "lgr %0, 3\n\t"                              \
                    : "=d" (_zzq_result)                         \
                    : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                    : "cc", "2", "3", "memory"                   \
                   );                                            \
   _zzq_result;                                                  \
 })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                      \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
   volatile unsigned long long int __addr;                       \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                    __GET_NR_CONTEXT_CODE                        \
                    "lgr %0, 3\n\t"                              \
                    : "=a" (__addr)                              \
                    :                                            \
                    : "cc", "3", "memory"                        \
                   );                                            \
   _zzq_orig->nraddr = __addr;                                   \
 }

/* Asm fragment only — must be embedded in a larger asm. */
#define VALGRIND_CALL_NOREDIR_R1                                 \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     __VEX_INJECT_IR_CODE);                      \
 } while (0)

#endif /* PLAT_s390x_linux */
895 
896 /* ------------------------- mips32-linux ---------------- */
897 
#if defined(PLAT_mips32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* .word 0x342
 * .word 0x742
 * .word 0xC2
 * .word 0x4C2
 * Each srl targets $0 (the hard-wired zero register), so the sequence
 * is a no-op on real hardware; Valgrind recognises it as the marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE          \
                     "srl $0, $0, 13\n\t"       \
                     "srl $0, $0, 29\n\t"       \
                     "srl $0, $0, 3\n\t"        \
                     "srl $0, $0, 19\n\t"

/* Default in $11 (t3), args pointer in $12 (t4); $11 returns the
   result. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
        __asm__ volatile("move $11, %1\n\t" /*default*/           \
                     "move $12, %2\n\t" /*ptr*/                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* T3 = client_request ( T4 ) */             \
                     "or $13, $13, $13\n\t"                       \
                     "move %0, $11\n\t"     /*result*/            \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "$11", "$12");                             \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %t9 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11"                                      \
                     );                                           \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment only — must be embedded in a larger asm. */
#define VALGRIND_CALL_NOREDIR_T9                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE              \
                     /* call-noredir *%t9 */                     \
                     "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                   "or $11, $11, $11\n\t"                        \
                  );                                             \
 } while (0)


#endif /* PLAT_mips32_linux */
968 
969 /* ------------------------- mips64-linux ---------------- */
970 
#if defined(PLAT_mips64_linux)

/* Record of an original (pre-redirection) function: just its address.
   On mips64 a code address needs 64 bits. */
typedef
   struct {
      unsigned long long nraddr; /* where's the code? */
   }
   OrigFn;

/* dsll $0,$0, 3
 * dsll $0,$0, 13
 * dsll $0,$0, 29
 * dsll $0,$0, 19*/
/* Magic preamble: four doubleword shifts of the hardwired-zero
   register; architectural no-ops natively, a JIT-recognised marker
   under Valgrind. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
        "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"                       \
        "dsll $0,$0,29 ; dsll $0,$0,19\n\t"

/* Issue a client request (64-bit variant of the mips32 macro above):
   request code + five args go via _zzq_args[], result returns in $11,
   or the default passes through when not running under Valgrind. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({ volatile unsigned long long int _zzq_args[6];                \
    volatile unsigned long long int _zzq_result;                  \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
    __asm__ volatile("move $11, %1\n\t" /*default*/               \
                     "move $12, %2\n\t" /*ptr*/                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $11 = client_request ( $12 ) */           \
                     "or $13, $13, $13\n\t"                       \
                     "move %0, $11\n\t" /*result*/                \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "$11", "$12");                             \
    _zzq_result;                                                  \
  })

/* Fetch the non-redirected address of the function being wrapped;
   the JIT deposits it in $11 via the marked "or $14,$14,$14". */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long long int __addr;                       \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $11 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11");                                    \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment: call through $25 ($t9) with redirection suppressed. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    /* call-noredir $25 */                        \
                    "or $15, $15, $15\n\t"

/* Ask the JIT to inject IR at this point (no effect natively). */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or $11, $11, $11\n\t"                       \
                    );                                            \
 } while (0)

#endif /* PLAT_mips64_linux */
1037 
1038 /* Insert assembly code for other platforms here... */
1039 
1040 #endif /* NVALGRIND */
1041 
1042 
1043 /* ------------------------------------------------------------------ */
1044 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
1045 /* ugly. It's the least-worst tradeoff I can think of. */
1046 /* ------------------------------------------------------------------ */
1047 
1048 /* This section defines magic (a.k.a appalling-hack) macros for doing
1049  guaranteed-no-redirection macros, so as to get from function
1050  wrappers to the functions they are wrapping. The whole point is to
1051  construct standard call sequences, but to do the call itself with a
1052  special no-redirect call pseudo-instruction that the JIT
1053  understands and handles specially. This section is long and
1054  repetitious, and I can't see a way to make it shorter.
1055 
1056  The naming scheme is as follows:
1057 
1058  CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
1059 
1060  'W' stands for "word" and 'v' for "void". Hence there are
1061  different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
1062  and for each, the possibility of returning a word-typed result, or
1063  no result.
1064 */
1065 
1066 /* Use these to write the name of your wrapper. NOTE: duplicates
1067  VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
1068  the default behavior equivalence class tag "0000" into the name.
1069  See pub_tool_redir.h for details -- normally you don't need to
1070  think about this, though. */
1071 
1072 /* Use an extra level of macroisation so as to ensure the soname/fnname
1073  args are fully macro-expanded before pasting them together. */
/* Token-paste four fragments; callers go through this extra level so
   that soname/fnname arguments are macro-expanded before pasting. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

/* Build a wrapper name from an unencoded (ZU) soname and fnname,
   with the default behaviour-equivalence class tag "0000". */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

/* As above, but both soname and fnname are Z-encoded (ZZ). */
#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)

/* Use this macro from within a wrapper function to collect the
   context (address and possibly other info) of the original function.
   Once you have that you can then use it in one of the CALL_FN_
   macros.  The type of the argument _lval is OrigFn. */
#define VALGRIND_GET_ORIG_FN(_lval)  VALGRIND_GET_NR_CONTEXT(_lval)

/* Also provide end-user facilities for function replacement, rather
   than wrapping.  A replacement function differs from a wrapper in
   that it has no way to get hold of the original function being
   called, and hence no way to call onwards to it.  In a replacement
   function, VALGRIND_GET_ORIG_FN always returns zero. */

/* Build a replacement name (prefix _vgr rather than _vgw). */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1099 
1100 /* Derivatives of the main macros below, for calling functions
1101  returning void. */
1102 
/* Void-returning variants: each forwards to the corresponding
   word-returning CALL_FN_W_* macro and discards the result via a
   volatile local. */
#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_v(_junk,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_W(_junk,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1134 
1135 /* ------------------------- x86-{linux,darwin} ---------------- */
1136 
1137 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
1138 
1139 /* These regs are trashed by the hidden call. No need to mention eax
1140  as gcc can already see that, plus causes gcc to bomb. */
1141 #define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
1142 
1143 /* Macros to save and align the stack before making a function
1144  call and restore it afterwards as gcc may not keep the stack
1145  pointer aligned if it doesn't realize calls are being made
1146  to other functions. */
1147 
1148 #define VALGRIND_ALIGN_STACK \
1149  "movl %%esp,%%edi\n\t" \
1150  "andl $0xfffffff0,%%esp\n\t"
1151 #define VALGRIND_RESTORE_STACK \
1152  "movl %%edi,%%esp\n\t"
1153 
1154 /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
1155  long) == 4. */
1156 
/* Call the 0-arg function at _orig.nraddr without redirection; result
   into lval.  _argvec[0] holds the target address; args (in later
   macros) are pushed right-to-left per the x86 cdecl convention, with
   a "subl" pad so the stack stays 16-byte aligned at the call. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 1 word arg: 12-byte pad + 1 push = 16 bytes. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 2 word args: 8-byte pad + 2 pushes = 16 bytes. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 3 word args: 4-byte pad + 3 pushes = 16 bytes. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1243 
/* 4 word args: 4 pushes = 16 bytes, no pad needed. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 5 word args: 12-byte pad + 5 pushes = 32 bytes. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 6 word args: 8-byte pad + 6 pushes = 32 bytes. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1329 
/* 7 word args: 4-byte pad + 7 pushes = 32 bytes. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 8 word args: 8 pushes = 32 bytes, no pad needed. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 9 word args: 12-byte pad + 9 pushes = 48 bytes. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1436 
1437 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1438  arg7,arg8,arg9,arg10) \
1439  do { \
1440  volatile OrigFn _orig = (orig); \
1441  volatile unsigned long _argvec[11]; \
1442  volatile unsigned long _res; \
1443  _argvec[0] = (unsigned long)_orig.nraddr; \
1444  _argvec[1] = (unsigned long)(arg1); \
1445  _argvec[2] = (unsigned long)(arg2); \
1446  _argvec[3] = (unsigned long)(arg3); \
1447  _argvec[4] = (unsigned long)(arg4); \
1448  _argvec[5] = (unsigned long)(arg5); \
1449  _argvec[6] = (unsigned long)(arg6); \
1450  _argvec[7] = (unsigned long)(arg7); \
1451  _argvec[8] = (unsigned long)(arg8); \
1452  _argvec[9] = (unsigned long)(arg9); \
1453  _argvec[10] = (unsigned long)(arg10); \
1454  __asm__ volatile( \
1455  VALGRIND_ALIGN_STACK \
1456  "subl $8, %%esp\n\t" \
1457  "pushl 40(%%eax)\n\t" \
1458  "pushl 36(%%eax)\n\t" \
1459  "pushl 32(%%eax)\n\t" \
1460  "pushl 28(%%eax)\n\t" \
1461  "pushl 24(%%eax)\n\t" \
1462  "pushl 20(%%eax)\n\t" \
1463  "pushl 16(%%eax)\n\t" \
1464  "pushl 12(%%eax)\n\t" \
1465  "pushl 8(%%eax)\n\t" \
1466  "pushl 4(%%eax)\n\t" \
1467  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1468  VALGRIND_CALL_NOREDIR_EAX \
1469  VALGRIND_RESTORE_STACK \
1470  : /*out*/ "=a" (_res) \
1471  : /*in*/ "a" (&_argvec[0]) \
1472  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1473  ); \
1474  lval = (__typeof__(lval)) _res; \
1475  } while (0)
1476 
1477 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1478  arg6,arg7,arg8,arg9,arg10, \
1479  arg11) \
1480  do { \
1481  volatile OrigFn _orig = (orig); \
1482  volatile unsigned long _argvec[12]; \
1483  volatile unsigned long _res; \
1484  _argvec[0] = (unsigned long)_orig.nraddr; \
1485  _argvec[1] = (unsigned long)(arg1); \
1486  _argvec[2] = (unsigned long)(arg2); \
1487  _argvec[3] = (unsigned long)(arg3); \
1488  _argvec[4] = (unsigned long)(arg4); \
1489  _argvec[5] = (unsigned long)(arg5); \
1490  _argvec[6] = (unsigned long)(arg6); \
1491  _argvec[7] = (unsigned long)(arg7); \
1492  _argvec[8] = (unsigned long)(arg8); \
1493  _argvec[9] = (unsigned long)(arg9); \
1494  _argvec[10] = (unsigned long)(arg10); \
1495  _argvec[11] = (unsigned long)(arg11); \
1496  __asm__ volatile( \
1497  VALGRIND_ALIGN_STACK \
1498  "subl $4, %%esp\n\t" \
1499  "pushl 44(%%eax)\n\t" \
1500  "pushl 40(%%eax)\n\t" \
1501  "pushl 36(%%eax)\n\t" \
1502  "pushl 32(%%eax)\n\t" \
1503  "pushl 28(%%eax)\n\t" \
1504  "pushl 24(%%eax)\n\t" \
1505  "pushl 20(%%eax)\n\t" \
1506  "pushl 16(%%eax)\n\t" \
1507  "pushl 12(%%eax)\n\t" \
1508  "pushl 8(%%eax)\n\t" \
1509  "pushl 4(%%eax)\n\t" \
1510  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1511  VALGRIND_CALL_NOREDIR_EAX \
1512  VALGRIND_RESTORE_STACK \
1513  : /*out*/ "=a" (_res) \
1514  : /*in*/ "a" (&_argvec[0]) \
1515  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1516  ); \
1517  lval = (__typeof__(lval)) _res; \
1518  } while (0)
1519 
1520 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1521  arg6,arg7,arg8,arg9,arg10, \
1522  arg11,arg12) \
1523  do { \
1524  volatile OrigFn _orig = (orig); \
1525  volatile unsigned long _argvec[13]; \
1526  volatile unsigned long _res; \
1527  _argvec[0] = (unsigned long)_orig.nraddr; \
1528  _argvec[1] = (unsigned long)(arg1); \
1529  _argvec[2] = (unsigned long)(arg2); \
1530  _argvec[3] = (unsigned long)(arg3); \
1531  _argvec[4] = (unsigned long)(arg4); \
1532  _argvec[5] = (unsigned long)(arg5); \
1533  _argvec[6] = (unsigned long)(arg6); \
1534  _argvec[7] = (unsigned long)(arg7); \
1535  _argvec[8] = (unsigned long)(arg8); \
1536  _argvec[9] = (unsigned long)(arg9); \
1537  _argvec[10] = (unsigned long)(arg10); \
1538  _argvec[11] = (unsigned long)(arg11); \
1539  _argvec[12] = (unsigned long)(arg12); \
1540  __asm__ volatile( \
1541  VALGRIND_ALIGN_STACK \
1542  "pushl 48(%%eax)\n\t" \
1543  "pushl 44(%%eax)\n\t" \
1544  "pushl 40(%%eax)\n\t" \
1545  "pushl 36(%%eax)\n\t" \
1546  "pushl 32(%%eax)\n\t" \
1547  "pushl 28(%%eax)\n\t" \
1548  "pushl 24(%%eax)\n\t" \
1549  "pushl 20(%%eax)\n\t" \
1550  "pushl 16(%%eax)\n\t" \
1551  "pushl 12(%%eax)\n\t" \
1552  "pushl 8(%%eax)\n\t" \
1553  "pushl 4(%%eax)\n\t" \
1554  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1555  VALGRIND_CALL_NOREDIR_EAX \
1556  VALGRIND_RESTORE_STACK \
1557  : /*out*/ "=a" (_res) \
1558  : /*in*/ "a" (&_argvec[0]) \
1559  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1560  ); \
1561  lval = (__typeof__(lval)) _res; \
1562  } while (0)
1563 
1564 #endif /* PLAT_x86_linux || PLAT_x86_darwin */
1565 
1566 /* ------------------------ amd64-{linux,darwin} --------------- */
1567 
1568 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
1569 
1570 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1571 
1572 /* These regs are trashed by the hidden call. */
1573 #define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
1574  "rdi", "r8", "r9", "r10", "r11"
1575 
1576 /* This is all pretty complex. It's so as to make stack unwinding
1577  work reliably. See bug 243270. The basic problem is the sub and
1578  add of 128 of %rsp in all of the following macros. If gcc believes
1579  the CFA is in %rsp, then unwinding may fail, because what's at the
1580  CFA is not what gcc "expected" when it constructs the CFIs for the
1581  places where the macros are instantiated.
1582 
1583  But we can't just add a CFI annotation to increase the CFA offset
1584  by 128, to match the sub of 128 from %rsp, because we don't know
1585  whether gcc has chosen %rsp as the CFA at that point, or whether it
1586  has chosen some other register (eg, %rbp). In the latter case,
1587  adding a CFI annotation to change the CFA offset is simply wrong.
1588 
1589  So the solution is to get hold of the CFA using
1590  __builtin_dwarf_cfa(), put it in a known register, and add a
1591  CFI annotation to say what the register is. We choose %rbp for
1592  this (perhaps perversely), because:
1593 
1594  (1) %rbp is already subject to unwinding. If a new register was
1595  chosen then the unwinder would have to unwind it in all stack
1596  traces, which is expensive, and
1597 
1598  (2) %rbp is already subject to precise exception updates in the
1599  JIT. If a new register was chosen, we'd have to have precise
1600  exceptions for it too, which reduces performance of the
1601  generated code.
1602 
1603  However .. one extra complication. We can't just whack the result
1604  of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1605  list of trashed registers at the end of the inline assembly
1606  fragments; gcc won't allow %rbp to appear in that list. Hence
1607  instead we need to stash %rbp in %r15 for the duration of the asm,
1608  and say that %r15 is trashed instead. gcc seems happy to go with
1609  that.
1610 
1611  Oh .. and this all needs to be conditionalised so that it is
1612  unchanged from before this commit, when compiled with older gccs
1613  that don't support __builtin_dwarf_cfa. Furthermore, since
1614  this header file is freestanding, it has to be independent of
1615  config.h, and so the following conditionalisation cannot depend on
1616  configure time checks.
1617 
1618  Although it's not clear from
1619  'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1620  this expression excludes Darwin.
1621  .cfi directives in Darwin assembly appear to be completely
1622  different and I haven't investigated how they work.
1623 
1624  For even more entertainment value, note we have to use the
1625  completely undocumented __builtin_dwarf_cfa(), which appears to
1626  really compute the CFA, whereas __builtin_frame_address(0) claims
1627  to but actually doesn't. See
1628  https://bugs.kde.org/show_bug.cgi?id=243270#c47
1629 */
/* When DWARF CFI asm is available, stash the caller's CFA in %rbp
   (saving the old %rbp in %r15) and emit .cfi directives so that the
   stack stays unwindable across the sub-$128/asm call sequences; see
   the long comment above.  Otherwise the three macros expand to
   nothing. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif
1646 
1647 /* Macros to save and align the stack before making a function
1648  call and restore it afterwards as gcc may not keep the stack
1649  pointer aligned if it doesn't realize calls are being made
1650  to other functions. */
1651 
/* Save %rsp in %r14, then force 16-byte alignment; every asm using
   this must list "r14" as clobbered and pair it with
   VALGRIND_RESTORE_STACK. */
#define VALGRIND_ALIGN_STACK \
      "movq %%rsp,%%r14\n\t" \
      "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK \
      "movq %%r14,%%rsp\n\t"
1657 
1658 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1659  long) == 8. */
1660 
1661 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1662  macros. In order not to trash the stack redzone, we need to drop
1663  %rsp by 128 before the hidden call, and restore afterwards. The
1664  nastiness is that it is only by luck that the stack still appears
1665  to be unwindable during the hidden call - since then the behavior
1666  of any routine using this macro does not match what the CFI data
1667  says. Sigh.
1668 
1669  Why is this important? Imagine that a wrapper has a stack
1670  allocated local, and passes to the hidden call, a pointer to it.
1671  Because gcc does not know about the hidden call, it may allocate
1672  that local in the redzone. Unfortunately the hidden call may then
1673  trash it before it comes to use it. So we must step clear of the
1674  redzone, for the duration of the hidden call, to make it safe.
1675 
1676  Probably the same problem afflicts the other redzone-style ABIs too
1677  (ppc64-linux); but for those, the stack is
1678  self describing (none of this CFI nonsense) so at least messing
1679  with the stack pointer doesn't give a danger of non-unwindable
1680  stack. */
1681 
1682 #define CALL_FN_W_v(lval, orig) \
1683  do { \
1684  volatile OrigFn _orig = (orig); \
1685  volatile unsigned long _argvec[1]; \
1686  volatile unsigned long _res; \
1687  _argvec[0] = (unsigned long)_orig.nraddr; \
1688  __asm__ volatile( \
1689  VALGRIND_CFI_PROLOGUE \
1690  VALGRIND_ALIGN_STACK \
1691  "subq $128,%%rsp\n\t" \
1692  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1693  VALGRIND_CALL_NOREDIR_RAX \
1694  VALGRIND_RESTORE_STACK \
1695  VALGRIND_CFI_EPILOGUE \
1696  : /*out*/ "=a" (_res) \
1697  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1698  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1699  ); \
1700  lval = (__typeof__(lval)) _res; \
1701  } while (0)
1702 
1703 #define CALL_FN_W_W(lval, orig, arg1) \
1704  do { \
1705  volatile OrigFn _orig = (orig); \
1706  volatile unsigned long _argvec[2]; \
1707  volatile unsigned long _res; \
1708  _argvec[0] = (unsigned long)_orig.nraddr; \
1709  _argvec[1] = (unsigned long)(arg1); \
1710  __asm__ volatile( \
1711  VALGRIND_CFI_PROLOGUE \
1712  VALGRIND_ALIGN_STACK \
1713  "subq $128,%%rsp\n\t" \
1714  "movq 8(%%rax), %%rdi\n\t" \
1715  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1716  VALGRIND_CALL_NOREDIR_RAX \
1717  VALGRIND_RESTORE_STACK \
1718  VALGRIND_CFI_EPILOGUE \
1719  : /*out*/ "=a" (_res) \
1720  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1721  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1722  ); \
1723  lval = (__typeof__(lval)) _res; \
1724  } while (0)
1725 
1726 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1727  do { \
1728  volatile OrigFn _orig = (orig); \
1729  volatile unsigned long _argvec[3]; \
1730  volatile unsigned long _res; \
1731  _argvec[0] = (unsigned long)_orig.nraddr; \
1732  _argvec[1] = (unsigned long)(arg1); \
1733  _argvec[2] = (unsigned long)(arg2); \
1734  __asm__ volatile( \
1735  VALGRIND_CFI_PROLOGUE \
1736  VALGRIND_ALIGN_STACK \
1737  "subq $128,%%rsp\n\t" \
1738  "movq 16(%%rax), %%rsi\n\t" \
1739  "movq 8(%%rax), %%rdi\n\t" \
1740  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1741  VALGRIND_CALL_NOREDIR_RAX \
1742  VALGRIND_RESTORE_STACK \
1743  VALGRIND_CFI_EPILOGUE \
1744  : /*out*/ "=a" (_res) \
1745  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1746  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1747  ); \
1748  lval = (__typeof__(lval)) _res; \
1749  } while (0)
1750 
1751 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1752  do { \
1753  volatile OrigFn _orig = (orig); \
1754  volatile unsigned long _argvec[4]; \
1755  volatile unsigned long _res; \
1756  _argvec[0] = (unsigned long)_orig.nraddr; \
1757  _argvec[1] = (unsigned long)(arg1); \
1758  _argvec[2] = (unsigned long)(arg2); \
1759  _argvec[3] = (unsigned long)(arg3); \
1760  __asm__ volatile( \
1761  VALGRIND_CFI_PROLOGUE \
1762  VALGRIND_ALIGN_STACK \
1763  "subq $128,%%rsp\n\t" \
1764  "movq 24(%%rax), %%rdx\n\t" \
1765  "movq 16(%%rax), %%rsi\n\t" \
1766  "movq 8(%%rax), %%rdi\n\t" \
1767  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1768  VALGRIND_CALL_NOREDIR_RAX \
1769  VALGRIND_RESTORE_STACK \
1770  VALGRIND_CFI_EPILOGUE \
1771  : /*out*/ "=a" (_res) \
1772  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1773  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1774  ); \
1775  lval = (__typeof__(lval)) _res; \
1776  } while (0)
1777 
/* amd64: call a 4-integer-arg function via no-redirect; args in
   %rdi/%rsi/%rdx/%rcx, red-zone skipped with "subq $128". */
1778 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1779  do { \
1780  volatile OrigFn _orig = (orig); \
1781  volatile unsigned long _argvec[5]; \
1782  volatile unsigned long _res; \
1783  _argvec[0] = (unsigned long)_orig.nraddr; \
1784  _argvec[1] = (unsigned long)(arg1); \
1785  _argvec[2] = (unsigned long)(arg2); \
1786  _argvec[3] = (unsigned long)(arg3); \
1787  _argvec[4] = (unsigned long)(arg4); \
1788  __asm__ volatile( \
1789  VALGRIND_CFI_PROLOGUE \
1790  VALGRIND_ALIGN_STACK \
1791  "subq $128,%%rsp\n\t" \
1792  "movq 32(%%rax), %%rcx\n\t" \
1793  "movq 24(%%rax), %%rdx\n\t" \
1794  "movq 16(%%rax), %%rsi\n\t" \
1795  "movq 8(%%rax), %%rdi\n\t" \
1796  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1797  VALGRIND_CALL_NOREDIR_RAX \
1798  VALGRIND_RESTORE_STACK \
1799  VALGRIND_CFI_EPILOGUE \
1800  : /*out*/ "=a" (_res) \
1801  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1802  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1803  ); \
1804  lval = (__typeof__(lval)) _res; \
1805  } while (0)
1806 
/* amd64: 5-arg no-redirect call; 5th arg goes in %r8, rest as before. */
1807 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1808  do { \
1809  volatile OrigFn _orig = (orig); \
1810  volatile unsigned long _argvec[6]; \
1811  volatile unsigned long _res; \
1812  _argvec[0] = (unsigned long)_orig.nraddr; \
1813  _argvec[1] = (unsigned long)(arg1); \
1814  _argvec[2] = (unsigned long)(arg2); \
1815  _argvec[3] = (unsigned long)(arg3); \
1816  _argvec[4] = (unsigned long)(arg4); \
1817  _argvec[5] = (unsigned long)(arg5); \
1818  __asm__ volatile( \
1819  VALGRIND_CFI_PROLOGUE \
1820  VALGRIND_ALIGN_STACK \
1821  "subq $128,%%rsp\n\t" \
1822  "movq 40(%%rax), %%r8\n\t" \
1823  "movq 32(%%rax), %%rcx\n\t" \
1824  "movq 24(%%rax), %%rdx\n\t" \
1825  "movq 16(%%rax), %%rsi\n\t" \
1826  "movq 8(%%rax), %%rdi\n\t" \
1827  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1828  VALGRIND_CALL_NOREDIR_RAX \
1829  VALGRIND_RESTORE_STACK \
1830  VALGRIND_CFI_EPILOGUE \
1831  : /*out*/ "=a" (_res) \
1832  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1833  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1834  ); \
1835  lval = (__typeof__(lval)) _res; \
1836  } while (0)
1837 
/* amd64: 6-arg no-redirect call; this is the last all-register case
   (%rdi,%rsi,%rdx,%rcx,%r8,%r9 — the full SysV integer-arg set). */
1838 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1839  do { \
1840  volatile OrigFn _orig = (orig); \
1841  volatile unsigned long _argvec[7]; \
1842  volatile unsigned long _res; \
1843  _argvec[0] = (unsigned long)_orig.nraddr; \
1844  _argvec[1] = (unsigned long)(arg1); \
1845  _argvec[2] = (unsigned long)(arg2); \
1846  _argvec[3] = (unsigned long)(arg3); \
1847  _argvec[4] = (unsigned long)(arg4); \
1848  _argvec[5] = (unsigned long)(arg5); \
1849  _argvec[6] = (unsigned long)(arg6); \
1850  __asm__ volatile( \
1851  VALGRIND_CFI_PROLOGUE \
1852  VALGRIND_ALIGN_STACK \
1853  "subq $128,%%rsp\n\t" \
1854  "movq 48(%%rax), %%r9\n\t" \
1855  "movq 40(%%rax), %%r8\n\t" \
1856  "movq 32(%%rax), %%rcx\n\t" \
1857  "movq 24(%%rax), %%rdx\n\t" \
1858  "movq 16(%%rax), %%rsi\n\t" \
1859  "movq 8(%%rax), %%rdi\n\t" \
1860  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1861  VALGRIND_CALL_NOREDIR_RAX \
1862  VALGRIND_RESTORE_STACK \
1863  VALGRIND_CFI_EPILOGUE \
1864  : /*out*/ "=a" (_res) \
1865  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1866  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1867  ); \
1868  lval = (__typeof__(lval)) _res; \
1869  } while (0)
1870 
/* amd64: 7-arg no-redirect call; arg7 is pushed on the stack.  Note the
   "subq $136" (not 128): 136 + one 8-byte push = 144, a multiple of 16,
   so %rsp stays 16-aligned at the call after VALGRIND_ALIGN_STACK. */
1871 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1872  arg7) \
1873  do { \
1874  volatile OrigFn _orig = (orig); \
1875  volatile unsigned long _argvec[8]; \
1876  volatile unsigned long _res; \
1877  _argvec[0] = (unsigned long)_orig.nraddr; \
1878  _argvec[1] = (unsigned long)(arg1); \
1879  _argvec[2] = (unsigned long)(arg2); \
1880  _argvec[3] = (unsigned long)(arg3); \
1881  _argvec[4] = (unsigned long)(arg4); \
1882  _argvec[5] = (unsigned long)(arg5); \
1883  _argvec[6] = (unsigned long)(arg6); \
1884  _argvec[7] = (unsigned long)(arg7); \
1885  __asm__ volatile( \
1886  VALGRIND_CFI_PROLOGUE \
1887  VALGRIND_ALIGN_STACK \
1888  "subq $136,%%rsp\n\t" \
1889  "pushq 56(%%rax)\n\t" \
1890  "movq 48(%%rax), %%r9\n\t" \
1891  "movq 40(%%rax), %%r8\n\t" \
1892  "movq 32(%%rax), %%rcx\n\t" \
1893  "movq 24(%%rax), %%rdx\n\t" \
1894  "movq 16(%%rax), %%rsi\n\t" \
1895  "movq 8(%%rax), %%rdi\n\t" \
1896  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1897  VALGRIND_CALL_NOREDIR_RAX \
1898  VALGRIND_RESTORE_STACK \
1899  VALGRIND_CFI_EPILOGUE \
1900  : /*out*/ "=a" (_res) \
1901  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1902  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1903  ); \
1904  lval = (__typeof__(lval)) _res; \
1905  } while (0)
1906 
/* amd64: 8-arg no-redirect call; args 7-8 pushed (last first so arg7
   ends up lowest).  128 + two pushes = 144, keeping 16-alignment. */
1907 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1908  arg7,arg8) \
1909  do { \
1910  volatile OrigFn _orig = (orig); \
1911  volatile unsigned long _argvec[9]; \
1912  volatile unsigned long _res; \
1913  _argvec[0] = (unsigned long)_orig.nraddr; \
1914  _argvec[1] = (unsigned long)(arg1); \
1915  _argvec[2] = (unsigned long)(arg2); \
1916  _argvec[3] = (unsigned long)(arg3); \
1917  _argvec[4] = (unsigned long)(arg4); \
1918  _argvec[5] = (unsigned long)(arg5); \
1919  _argvec[6] = (unsigned long)(arg6); \
1920  _argvec[7] = (unsigned long)(arg7); \
1921  _argvec[8] = (unsigned long)(arg8); \
1922  __asm__ volatile( \
1923  VALGRIND_CFI_PROLOGUE \
1924  VALGRIND_ALIGN_STACK \
1925  "subq $128,%%rsp\n\t" \
1926  "pushq 64(%%rax)\n\t" \
1927  "pushq 56(%%rax)\n\t" \
1928  "movq 48(%%rax), %%r9\n\t" \
1929  "movq 40(%%rax), %%r8\n\t" \
1930  "movq 32(%%rax), %%rcx\n\t" \
1931  "movq 24(%%rax), %%rdx\n\t" \
1932  "movq 16(%%rax), %%rsi\n\t" \
1933  "movq 8(%%rax), %%rdi\n\t" \
1934  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1935  VALGRIND_CALL_NOREDIR_RAX \
1936  VALGRIND_RESTORE_STACK \
1937  VALGRIND_CFI_EPILOGUE \
1938  : /*out*/ "=a" (_res) \
1939  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1940  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1941  ); \
1942  lval = (__typeof__(lval)) _res; \
1943  } while (0)
1944 
/* amd64: 9-arg no-redirect call; args 7-9 pushed.  136 + three pushes
   = 160, a multiple of 16 — same alignment scheme as the 7W case. */
1945 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1946  arg7,arg8,arg9) \
1947  do { \
1948  volatile OrigFn _orig = (orig); \
1949  volatile unsigned long _argvec[10]; \
1950  volatile unsigned long _res; \
1951  _argvec[0] = (unsigned long)_orig.nraddr; \
1952  _argvec[1] = (unsigned long)(arg1); \
1953  _argvec[2] = (unsigned long)(arg2); \
1954  _argvec[3] = (unsigned long)(arg3); \
1955  _argvec[4] = (unsigned long)(arg4); \
1956  _argvec[5] = (unsigned long)(arg5); \
1957  _argvec[6] = (unsigned long)(arg6); \
1958  _argvec[7] = (unsigned long)(arg7); \
1959  _argvec[8] = (unsigned long)(arg8); \
1960  _argvec[9] = (unsigned long)(arg9); \
1961  __asm__ volatile( \
1962  VALGRIND_CFI_PROLOGUE \
1963  VALGRIND_ALIGN_STACK \
1964  "subq $136,%%rsp\n\t" \
1965  "pushq 72(%%rax)\n\t" \
1966  "pushq 64(%%rax)\n\t" \
1967  "pushq 56(%%rax)\n\t" \
1968  "movq 48(%%rax), %%r9\n\t" \
1969  "movq 40(%%rax), %%r8\n\t" \
1970  "movq 32(%%rax), %%rcx\n\t" \
1971  "movq 24(%%rax), %%rdx\n\t" \
1972  "movq 16(%%rax), %%rsi\n\t" \
1973  "movq 8(%%rax), %%rdi\n\t" \
1974  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1975  VALGRIND_CALL_NOREDIR_RAX \
1976  VALGRIND_RESTORE_STACK \
1977  VALGRIND_CFI_EPILOGUE \
1978  : /*out*/ "=a" (_res) \
1979  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1980  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1981  ); \
1982  lval = (__typeof__(lval)) _res; \
1983  } while (0)
1984 
/* amd64: 10-arg no-redirect call; args 7-10 pushed.  128 + four pushes
   = 160, keeping 16-alignment. */
1985 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1986  arg7,arg8,arg9,arg10) \
1987  do { \
1988  volatile OrigFn _orig = (orig); \
1989  volatile unsigned long _argvec[11]; \
1990  volatile unsigned long _res; \
1991  _argvec[0] = (unsigned long)_orig.nraddr; \
1992  _argvec[1] = (unsigned long)(arg1); \
1993  _argvec[2] = (unsigned long)(arg2); \
1994  _argvec[3] = (unsigned long)(arg3); \
1995  _argvec[4] = (unsigned long)(arg4); \
1996  _argvec[5] = (unsigned long)(arg5); \
1997  _argvec[6] = (unsigned long)(arg6); \
1998  _argvec[7] = (unsigned long)(arg7); \
1999  _argvec[8] = (unsigned long)(arg8); \
2000  _argvec[9] = (unsigned long)(arg9); \
2001  _argvec[10] = (unsigned long)(arg10); \
2002  __asm__ volatile( \
2003  VALGRIND_CFI_PROLOGUE \
2004  VALGRIND_ALIGN_STACK \
2005  "subq $128,%%rsp\n\t" \
2006  "pushq 80(%%rax)\n\t" \
2007  "pushq 72(%%rax)\n\t" \
2008  "pushq 64(%%rax)\n\t" \
2009  "pushq 56(%%rax)\n\t" \
2010  "movq 48(%%rax), %%r9\n\t" \
2011  "movq 40(%%rax), %%r8\n\t" \
2012  "movq 32(%%rax), %%rcx\n\t" \
2013  "movq 24(%%rax), %%rdx\n\t" \
2014  "movq 16(%%rax), %%rsi\n\t" \
2015  "movq 8(%%rax), %%rdi\n\t" \
2016  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2017  VALGRIND_CALL_NOREDIR_RAX \
2018  VALGRIND_RESTORE_STACK \
2019  VALGRIND_CFI_EPILOGUE \
2020  : /*out*/ "=a" (_res) \
2021  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2022  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2023  ); \
2024  lval = (__typeof__(lval)) _res; \
2025  } while (0)
2026 
/* amd64: 11-arg no-redirect call; args 7-11 pushed.  136 + five pushes
   = 176, keeping 16-alignment. */
2027 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2028  arg7,arg8,arg9,arg10,arg11) \
2029  do { \
2030  volatile OrigFn _orig = (orig); \
2031  volatile unsigned long _argvec[12]; \
2032  volatile unsigned long _res; \
2033  _argvec[0] = (unsigned long)_orig.nraddr; \
2034  _argvec[1] = (unsigned long)(arg1); \
2035  _argvec[2] = (unsigned long)(arg2); \
2036  _argvec[3] = (unsigned long)(arg3); \
2037  _argvec[4] = (unsigned long)(arg4); \
2038  _argvec[5] = (unsigned long)(arg5); \
2039  _argvec[6] = (unsigned long)(arg6); \
2040  _argvec[7] = (unsigned long)(arg7); \
2041  _argvec[8] = (unsigned long)(arg8); \
2042  _argvec[9] = (unsigned long)(arg9); \
2043  _argvec[10] = (unsigned long)(arg10); \
2044  _argvec[11] = (unsigned long)(arg11); \
2045  __asm__ volatile( \
2046  VALGRIND_CFI_PROLOGUE \
2047  VALGRIND_ALIGN_STACK \
2048  "subq $136,%%rsp\n\t" \
2049  "pushq 88(%%rax)\n\t" \
2050  "pushq 80(%%rax)\n\t" \
2051  "pushq 72(%%rax)\n\t" \
2052  "pushq 64(%%rax)\n\t" \
2053  "pushq 56(%%rax)\n\t" \
2054  "movq 48(%%rax), %%r9\n\t" \
2055  "movq 40(%%rax), %%r8\n\t" \
2056  "movq 32(%%rax), %%rcx\n\t" \
2057  "movq 24(%%rax), %%rdx\n\t" \
2058  "movq 16(%%rax), %%rsi\n\t" \
2059  "movq 8(%%rax), %%rdi\n\t" \
2060  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2061  VALGRIND_CALL_NOREDIR_RAX \
2062  VALGRIND_RESTORE_STACK \
2063  VALGRIND_CFI_EPILOGUE \
2064  : /*out*/ "=a" (_res) \
2065  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2066  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2067  ); \
2068  lval = (__typeof__(lval)) _res; \
2069  } while (0)
2070 
/* amd64: 12-arg no-redirect call; args 7-12 pushed.  128 + six pushes
   = 176, keeping 16-alignment.  Largest arity supported here. */
2071 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2072  arg7,arg8,arg9,arg10,arg11,arg12) \
2073  do { \
2074  volatile OrigFn _orig = (orig); \
2075  volatile unsigned long _argvec[13]; \
2076  volatile unsigned long _res; \
2077  _argvec[0] = (unsigned long)_orig.nraddr; \
2078  _argvec[1] = (unsigned long)(arg1); \
2079  _argvec[2] = (unsigned long)(arg2); \
2080  _argvec[3] = (unsigned long)(arg3); \
2081  _argvec[4] = (unsigned long)(arg4); \
2082  _argvec[5] = (unsigned long)(arg5); \
2083  _argvec[6] = (unsigned long)(arg6); \
2084  _argvec[7] = (unsigned long)(arg7); \
2085  _argvec[8] = (unsigned long)(arg8); \
2086  _argvec[9] = (unsigned long)(arg9); \
2087  _argvec[10] = (unsigned long)(arg10); \
2088  _argvec[11] = (unsigned long)(arg11); \
2089  _argvec[12] = (unsigned long)(arg12); \
2090  __asm__ volatile( \
2091  VALGRIND_CFI_PROLOGUE \
2092  VALGRIND_ALIGN_STACK \
2093  "subq $128,%%rsp\n\t" \
2094  "pushq 96(%%rax)\n\t" \
2095  "pushq 88(%%rax)\n\t" \
2096  "pushq 80(%%rax)\n\t" \
2097  "pushq 72(%%rax)\n\t" \
2098  "pushq 64(%%rax)\n\t" \
2099  "pushq 56(%%rax)\n\t" \
2100  "movq 48(%%rax), %%r9\n\t" \
2101  "movq 40(%%rax), %%r8\n\t" \
2102  "movq 32(%%rax), %%rcx\n\t" \
2103  "movq 24(%%rax), %%rdx\n\t" \
2104  "movq 16(%%rax), %%rsi\n\t" \
2105  "movq 8(%%rax), %%rdi\n\t" \
2106  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2107  VALGRIND_CALL_NOREDIR_RAX \
2108  VALGRIND_RESTORE_STACK \
2109  VALGRIND_CFI_EPILOGUE \
2110  : /*out*/ "=a" (_res) \
2111  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2112  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2113  ); \
2114  lval = (__typeof__(lval)) _res; \
2115  } while (0)
2116 
2117 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
2118 
2119 /* ------------------------ ppc32-linux ------------------------ */
2120 
2121 #if defined(PLAT_ppc32_linux)
2122 
2123 /* This is useful for finding out about the on-stack stuff:
2124 
2125  extern int f9 ( int,int,int,int,int,int,int,int,int );
2126  extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2127  extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2128  extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2129 
2130  int g9 ( void ) {
2131  return f9(11,22,33,44,55,66,77,88,99);
2132  }
2133  int g10 ( void ) {
2134  return f10(11,22,33,44,55,66,77,88,99,110);
2135  }
2136  int g11 ( void ) {
2137  return f11(11,22,33,44,55,66,77,88,99,110,121);
2138  }
2139  int g12 ( void ) {
2140  return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2141  }
2142 */
2143 
2144 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2145 
2146 /* These regs are trashed by the hidden call. */
/* ppc32: full clobber list for the hidden call — link/count/fixed-point
   exception registers, all condition-register fields, and every
   volatile GPR (r0, r2-r13), so gcc keeps nothing live in them. */
2147 #define __CALLER_SAVED_REGS \
2148  "lr", "ctr", "xer", \
2149  "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2150  "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2151  "r11", "r12", "r13"
2152 
2153 /* Macros to save and align the stack before making a function
2154  call and restore it afterwards as gcc may not keep the stack
2155  pointer aligned if it doesn't realize calls are being made
2156  to other functions. */
2157 
/* ppc32: save the stack pointer (r1) in r28, then clear its low 4 bits
   (rlwinm mask 0..27) to force 16-byte alignment; RESTORE puts the
   saved value back.  r28 is listed in the asm clobbers at each use. */
2158 #define VALGRIND_ALIGN_STACK \
2159  "mr 28,1\n\t" \
2160  "rlwinm 1,1,0,0,27\n\t"
2161 #define VALGRIND_RESTORE_STACK \
2162  "mr 1,28\n\t"
2163 
2164 /* These CALL_FN_ macros assume that on ppc32-linux,
2165  sizeof(unsigned long) == 4. */
2166 
/* ppc32: 0-arg no-redirect call.  The target address sits at 0(11);
   the result comes back in r3 and is copied to _res. */
2167 #define CALL_FN_W_v(lval, orig) \
2168  do { \
2169  volatile OrigFn _orig = (orig); \
2170  volatile unsigned long _argvec[1]; \
2171  volatile unsigned long _res; \
2172  _argvec[0] = (unsigned long)_orig.nraddr; \
2173  __asm__ volatile( \
2174  VALGRIND_ALIGN_STACK \
2175  "mr 11,%1\n\t" \
2176  "lwz 11,0(11)\n\t" /* target->r11 */ \
2177  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2178  VALGRIND_RESTORE_STACK \
2179  "mr %0,3" \
2180  : /*out*/ "=r" (_res) \
2181  : /*in*/ "r" (&_argvec[0]) \
2182  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2183  ); \
2184  lval = (__typeof__(lval)) _res; \
2185  } while (0)
2186 
/* ppc32: 1-arg no-redirect call; arg1 loaded from 4(11) into r3.
   r11 is overwritten with the target last, after args are loaded. */
2187 #define CALL_FN_W_W(lval, orig, arg1) \
2188  do { \
2189  volatile OrigFn _orig = (orig); \
2190  volatile unsigned long _argvec[2]; \
2191  volatile unsigned long _res; \
2192  _argvec[0] = (unsigned long)_orig.nraddr; \
2193  _argvec[1] = (unsigned long)arg1; \
2194  __asm__ volatile( \
2195  VALGRIND_ALIGN_STACK \
2196  "mr 11,%1\n\t" \
2197  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2198  "lwz 11,0(11)\n\t" /* target->r11 */ \
2199  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2200  VALGRIND_RESTORE_STACK \
2201  "mr %0,3" \
2202  : /*out*/ "=r" (_res) \
2203  : /*in*/ "r" (&_argvec[0]) \
2204  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2205  ); \
2206  lval = (__typeof__(lval)) _res; \
2207  } while (0)
2208 
/* ppc32: 2-arg no-redirect call; args -> r3,r4 from 4(11),8(11). */
2209 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2210  do { \
2211  volatile OrigFn _orig = (orig); \
2212  volatile unsigned long _argvec[3]; \
2213  volatile unsigned long _res; \
2214  _argvec[0] = (unsigned long)_orig.nraddr; \
2215  _argvec[1] = (unsigned long)arg1; \
2216  _argvec[2] = (unsigned long)arg2; \
2217  __asm__ volatile( \
2218  VALGRIND_ALIGN_STACK \
2219  "mr 11,%1\n\t" \
2220  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2221  "lwz 4,8(11)\n\t" \
2222  "lwz 11,0(11)\n\t" /* target->r11 */ \
2223  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2224  VALGRIND_RESTORE_STACK \
2225  "mr %0,3" \
2226  : /*out*/ "=r" (_res) \
2227  : /*in*/ "r" (&_argvec[0]) \
2228  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2229  ); \
2230  lval = (__typeof__(lval)) _res; \
2231  } while (0)
2232 
/* ppc32: 3-arg no-redirect call; args -> r3,r4,r5. */
2233 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2234  do { \
2235  volatile OrigFn _orig = (orig); \
2236  volatile unsigned long _argvec[4]; \
2237  volatile unsigned long _res; \
2238  _argvec[0] = (unsigned long)_orig.nraddr; \
2239  _argvec[1] = (unsigned long)arg1; \
2240  _argvec[2] = (unsigned long)arg2; \
2241  _argvec[3] = (unsigned long)arg3; \
2242  __asm__ volatile( \
2243  VALGRIND_ALIGN_STACK \
2244  "mr 11,%1\n\t" \
2245  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2246  "lwz 4,8(11)\n\t" \
2247  "lwz 5,12(11)\n\t" \
2248  "lwz 11,0(11)\n\t" /* target->r11 */ \
2249  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2250  VALGRIND_RESTORE_STACK \
2251  "mr %0,3" \
2252  : /*out*/ "=r" (_res) \
2253  : /*in*/ "r" (&_argvec[0]) \
2254  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2255  ); \
2256  lval = (__typeof__(lval)) _res; \
2257  } while (0)
2258 
/* ppc32: 4-arg no-redirect call; args -> r3..r6. */
2259 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2260  do { \
2261  volatile OrigFn _orig = (orig); \
2262  volatile unsigned long _argvec[5]; \
2263  volatile unsigned long _res; \
2264  _argvec[0] = (unsigned long)_orig.nraddr; \
2265  _argvec[1] = (unsigned long)arg1; \
2266  _argvec[2] = (unsigned long)arg2; \
2267  _argvec[3] = (unsigned long)arg3; \
2268  _argvec[4] = (unsigned long)arg4; \
2269  __asm__ volatile( \
2270  VALGRIND_ALIGN_STACK \
2271  "mr 11,%1\n\t" \
2272  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2273  "lwz 4,8(11)\n\t" \
2274  "lwz 5,12(11)\n\t" \
2275  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2276  "lwz 11,0(11)\n\t" /* target->r11 */ \
2277  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2278  VALGRIND_RESTORE_STACK \
2279  "mr %0,3" \
2280  : /*out*/ "=r" (_res) \
2281  : /*in*/ "r" (&_argvec[0]) \
2282  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2283  ); \
2284  lval = (__typeof__(lval)) _res; \
2285  } while (0)
2286 
/* ppc32: 5-arg no-redirect call; args -> r3..r7. */
2287 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2288  do { \
2289  volatile OrigFn _orig = (orig); \
2290  volatile unsigned long _argvec[6]; \
2291  volatile unsigned long _res; \
2292  _argvec[0] = (unsigned long)_orig.nraddr; \
2293  _argvec[1] = (unsigned long)arg1; \
2294  _argvec[2] = (unsigned long)arg2; \
2295  _argvec[3] = (unsigned long)arg3; \
2296  _argvec[4] = (unsigned long)arg4; \
2297  _argvec[5] = (unsigned long)arg5; \
2298  __asm__ volatile( \
2299  VALGRIND_ALIGN_STACK \
2300  "mr 11,%1\n\t" \
2301  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2302  "lwz 4,8(11)\n\t" \
2303  "lwz 5,12(11)\n\t" \
2304  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2305  "lwz 7,20(11)\n\t" \
2306  "lwz 11,0(11)\n\t" /* target->r11 */ \
2307  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2308  VALGRIND_RESTORE_STACK \
2309  "mr %0,3" \
2310  : /*out*/ "=r" (_res) \
2311  : /*in*/ "r" (&_argvec[0]) \
2312  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2313  ); \
2314  lval = (__typeof__(lval)) _res; \
2315  } while (0)
2316 
/* ppc32: 6-arg no-redirect call; args -> r3..r8. */
2317 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2318  do { \
2319  volatile OrigFn _orig = (orig); \
2320  volatile unsigned long _argvec[7]; \
2321  volatile unsigned long _res; \
2322  _argvec[0] = (unsigned long)_orig.nraddr; \
2323  _argvec[1] = (unsigned long)arg1; \
2324  _argvec[2] = (unsigned long)arg2; \
2325  _argvec[3] = (unsigned long)arg3; \
2326  _argvec[4] = (unsigned long)arg4; \
2327  _argvec[5] = (unsigned long)arg5; \
2328  _argvec[6] = (unsigned long)arg6; \
2329  __asm__ volatile( \
2330  VALGRIND_ALIGN_STACK \
2331  "mr 11,%1\n\t" \
2332  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2333  "lwz 4,8(11)\n\t" \
2334  "lwz 5,12(11)\n\t" \
2335  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2336  "lwz 7,20(11)\n\t" \
2337  "lwz 8,24(11)\n\t" \
2338  "lwz 11,0(11)\n\t" /* target->r11 */ \
2339  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2340  VALGRIND_RESTORE_STACK \
2341  "mr %0,3" \
2342  : /*out*/ "=r" (_res) \
2343  : /*in*/ "r" (&_argvec[0]) \
2344  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2345  ); \
2346  lval = (__typeof__(lval)) _res; \
2347  } while (0)
2348 
/* ppc32: 7-arg no-redirect call; args -> r3..r9. */
2349 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2350  arg7) \
2351  do { \
2352  volatile OrigFn _orig = (orig); \
2353  volatile unsigned long _argvec[8]; \
2354  volatile unsigned long _res; \
2355  _argvec[0] = (unsigned long)_orig.nraddr; \
2356  _argvec[1] = (unsigned long)arg1; \
2357  _argvec[2] = (unsigned long)arg2; \
2358  _argvec[3] = (unsigned long)arg3; \
2359  _argvec[4] = (unsigned long)arg4; \
2360  _argvec[5] = (unsigned long)arg5; \
2361  _argvec[6] = (unsigned long)arg6; \
2362  _argvec[7] = (unsigned long)arg7; \
2363  __asm__ volatile( \
2364  VALGRIND_ALIGN_STACK \
2365  "mr 11,%1\n\t" \
2366  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2367  "lwz 4,8(11)\n\t" \
2368  "lwz 5,12(11)\n\t" \
2369  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2370  "lwz 7,20(11)\n\t" \
2371  "lwz 8,24(11)\n\t" \
2372  "lwz 9,28(11)\n\t" \
2373  "lwz 11,0(11)\n\t" /* target->r11 */ \
2374  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2375  VALGRIND_RESTORE_STACK \
2376  "mr %0,3" \
2377  : /*out*/ "=r" (_res) \
2378  : /*in*/ "r" (&_argvec[0]) \
2379  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2380  ); \
2381  lval = (__typeof__(lval)) _res; \
2382  } while (0)
2383 
/* ppc32: 8-arg no-redirect call; args -> r3..r10, the full register
   argument set for this ABI (see ARGREGS note above). */
2384 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2385  arg7,arg8) \
2386  do { \
2387  volatile OrigFn _orig = (orig); \
2388  volatile unsigned long _argvec[9]; \
2389  volatile unsigned long _res; \
2390  _argvec[0] = (unsigned long)_orig.nraddr; \
2391  _argvec[1] = (unsigned long)arg1; \
2392  _argvec[2] = (unsigned long)arg2; \
2393  _argvec[3] = (unsigned long)arg3; \
2394  _argvec[4] = (unsigned long)arg4; \
2395  _argvec[5] = (unsigned long)arg5; \
2396  _argvec[6] = (unsigned long)arg6; \
2397  _argvec[7] = (unsigned long)arg7; \
2398  _argvec[8] = (unsigned long)arg8; \
2399  __asm__ volatile( \
2400  VALGRIND_ALIGN_STACK \
2401  "mr 11,%1\n\t" \
2402  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2403  "lwz 4,8(11)\n\t" \
2404  "lwz 5,12(11)\n\t" \
2405  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2406  "lwz 7,20(11)\n\t" \
2407  "lwz 8,24(11)\n\t" \
2408  "lwz 9,28(11)\n\t" \
2409  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2410  "lwz 11,0(11)\n\t" /* target->r11 */ \
2411  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2412  VALGRIND_RESTORE_STACK \
2413  "mr %0,3" \
2414  : /*out*/ "=r" (_res) \
2415  : /*in*/ "r" (&_argvec[0]) \
2416  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2417  ); \
2418  lval = (__typeof__(lval)) _res; \
2419  } while (0)
2420 
/* ppc32: 9-arg no-redirect call.  First 8 args go in r3..r10; a 16-byte
   frame is opened (addi 1,1,-16) and arg9 is stored at 8(1) — presumably
   the start of this ABI's on-stack parameter area (see the f9..f12
   exploration comment above).  r3 is used as scratch for the store,
   then reloaded last with arg1. */
2421 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2422  arg7,arg8,arg9) \
2423  do { \
2424  volatile OrigFn _orig = (orig); \
2425  volatile unsigned long _argvec[10]; \
2426  volatile unsigned long _res; \
2427  _argvec[0] = (unsigned long)_orig.nraddr; \
2428  _argvec[1] = (unsigned long)arg1; \
2429  _argvec[2] = (unsigned long)arg2; \
2430  _argvec[3] = (unsigned long)arg3; \
2431  _argvec[4] = (unsigned long)arg4; \
2432  _argvec[5] = (unsigned long)arg5; \
2433  _argvec[6] = (unsigned long)arg6; \
2434  _argvec[7] = (unsigned long)arg7; \
2435  _argvec[8] = (unsigned long)arg8; \
2436  _argvec[9] = (unsigned long)arg9; \
2437  __asm__ volatile( \
2438  VALGRIND_ALIGN_STACK \
2439  "mr 11,%1\n\t" \
2440  "addi 1,1,-16\n\t" \
2441  /* arg9 */ \
2442  "lwz 3,36(11)\n\t" \
2443  "stw 3,8(1)\n\t" \
2444  /* args1-8 */ \
2445  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2446  "lwz 4,8(11)\n\t" \
2447  "lwz 5,12(11)\n\t" \
2448  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2449  "lwz 7,20(11)\n\t" \
2450  "lwz 8,24(11)\n\t" \
2451  "lwz 9,28(11)\n\t" \
2452  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2453  "lwz 11,0(11)\n\t" /* target->r11 */ \
2454  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2455  VALGRIND_RESTORE_STACK \
2456  "mr %0,3" \
2457  : /*out*/ "=r" (_res) \
2458  : /*in*/ "r" (&_argvec[0]) \
2459  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2460  ); \
2461  lval = (__typeof__(lval)) _res; \
2462  } while (0)
2463 
/* ppc32: 10-arg no-redirect call; like the 9W case, with arg10 stored
   at 12(1) and arg9 at 8(1) inside the 16-byte frame.  Stack args are
   stored highest-first so r3 scratch usage never clobbers a loaded arg. */
2464 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2465  arg7,arg8,arg9,arg10) \
2466  do { \
2467  volatile OrigFn _orig = (orig); \
2468  volatile unsigned long _argvec[11]; \
2469  volatile unsigned long _res; \
2470  _argvec[0] = (unsigned long)_orig.nraddr; \
2471  _argvec[1] = (unsigned long)arg1; \
2472  _argvec[2] = (unsigned long)arg2; \
2473  _argvec[3] = (unsigned long)arg3; \
2474  _argvec[4] = (unsigned long)arg4; \
2475  _argvec[5] = (unsigned long)arg5; \
2476  _argvec[6] = (unsigned long)arg6; \
2477  _argvec[7] = (unsigned long)arg7; \
2478  _argvec[8] = (unsigned long)arg8; \
2479  _argvec[9] = (unsigned long)arg9; \
2480  _argvec[10] = (unsigned long)arg10; \
2481  __asm__ volatile( \
2482  VALGRIND_ALIGN_STACK \
2483  "mr 11,%1\n\t" \
2484  "addi 1,1,-16\n\t" \
2485  /* arg10 */ \
2486  "lwz 3,40(11)\n\t" \
2487  "stw 3,12(1)\n\t" \
2488  /* arg9 */ \
2489  "lwz 3,36(11)\n\t" \
2490  "stw 3,8(1)\n\t" \
2491  /* args1-8 */ \
2492  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2493  "lwz 4,8(11)\n\t" \
2494  "lwz 5,12(11)\n\t" \
2495  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2496  "lwz 7,20(11)\n\t" \
2497  "lwz 8,24(11)\n\t" \
2498  "lwz 9,28(11)\n\t" \
2499  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2500  "lwz 11,0(11)\n\t" /* target->r11 */ \
2501  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2502  VALGRIND_RESTORE_STACK \
2503  "mr %0,3" \
2504  : /*out*/ "=r" (_res) \
2505  : /*in*/ "r" (&_argvec[0]) \
2506  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2507  ); \
2508  lval = (__typeof__(lval)) _res; \
2509  } while (0)
2510 
/* ppc32: 11-arg no-redirect call; frame grows to 32 bytes
   (addi 1,1,-32) to hold arg9-11 at 8(1)/12(1)/16(1). */
2511 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2512  arg7,arg8,arg9,arg10,arg11) \
2513  do { \
2514  volatile OrigFn _orig = (orig); \
2515  volatile unsigned long _argvec[12]; \
2516  volatile unsigned long _res; \
2517  _argvec[0] = (unsigned long)_orig.nraddr; \
2518  _argvec[1] = (unsigned long)arg1; \
2519  _argvec[2] = (unsigned long)arg2; \
2520  _argvec[3] = (unsigned long)arg3; \
2521  _argvec[4] = (unsigned long)arg4; \
2522  _argvec[5] = (unsigned long)arg5; \
2523  _argvec[6] = (unsigned long)arg6; \
2524  _argvec[7] = (unsigned long)arg7; \
2525  _argvec[8] = (unsigned long)arg8; \
2526  _argvec[9] = (unsigned long)arg9; \
2527  _argvec[10] = (unsigned long)arg10; \
2528  _argvec[11] = (unsigned long)arg11; \
2529  __asm__ volatile( \
2530  VALGRIND_ALIGN_STACK \
2531  "mr 11,%1\n\t" \
2532  "addi 1,1,-32\n\t" \
2533  /* arg11 */ \
2534  "lwz 3,44(11)\n\t" \
2535  "stw 3,16(1)\n\t" \
2536  /* arg10 */ \
2537  "lwz 3,40(11)\n\t" \
2538  "stw 3,12(1)\n\t" \
2539  /* arg9 */ \
2540  "lwz 3,36(11)\n\t" \
2541  "stw 3,8(1)\n\t" \
2542  /* args1-8 */ \
2543  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2544  "lwz 4,8(11)\n\t" \
2545  "lwz 5,12(11)\n\t" \
2546  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2547  "lwz 7,20(11)\n\t" \
2548  "lwz 8,24(11)\n\t" \
2549  "lwz 9,28(11)\n\t" \
2550  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2551  "lwz 11,0(11)\n\t" /* target->r11 */ \
2552  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2553  VALGRIND_RESTORE_STACK \
2554  "mr %0,3" \
2555  : /*out*/ "=r" (_res) \
2556  : /*in*/ "r" (&_argvec[0]) \
2557  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2558  ); \
2559  lval = (__typeof__(lval)) _res; \
2560  } while (0)
2561 
/* ppc32: 12-arg no-redirect call; 32-byte frame holds arg9-12 at
   8(1)..20(1).  Largest arity supported here. */
2562 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2563  arg7,arg8,arg9,arg10,arg11,arg12) \
2564  do { \
2565  volatile OrigFn _orig = (orig); \
2566  volatile unsigned long _argvec[13]; \
2567  volatile unsigned long _res; \
2568  _argvec[0] = (unsigned long)_orig.nraddr; \
2569  _argvec[1] = (unsigned long)arg1; \
2570  _argvec[2] = (unsigned long)arg2; \
2571  _argvec[3] = (unsigned long)arg3; \
2572  _argvec[4] = (unsigned long)arg4; \
2573  _argvec[5] = (unsigned long)arg5; \
2574  _argvec[6] = (unsigned long)arg6; \
2575  _argvec[7] = (unsigned long)arg7; \
2576  _argvec[8] = (unsigned long)arg8; \
2577  _argvec[9] = (unsigned long)arg9; \
2578  _argvec[10] = (unsigned long)arg10; \
2579  _argvec[11] = (unsigned long)arg11; \
2580  _argvec[12] = (unsigned long)arg12; \
2581  __asm__ volatile( \
2582  VALGRIND_ALIGN_STACK \
2583  "mr 11,%1\n\t" \
2584  "addi 1,1,-32\n\t" \
2585  /* arg12 */ \
2586  "lwz 3,48(11)\n\t" \
2587  "stw 3,20(1)\n\t" \
2588  /* arg11 */ \
2589  "lwz 3,44(11)\n\t" \
2590  "stw 3,16(1)\n\t" \
2591  /* arg10 */ \
2592  "lwz 3,40(11)\n\t" \
2593  "stw 3,12(1)\n\t" \
2594  /* arg9 */ \
2595  "lwz 3,36(11)\n\t" \
2596  "stw 3,8(1)\n\t" \
2597  /* args1-8 */ \
2598  "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2599  "lwz 4,8(11)\n\t" \
2600  "lwz 5,12(11)\n\t" \
2601  "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2602  "lwz 7,20(11)\n\t" \
2603  "lwz 8,24(11)\n\t" \
2604  "lwz 9,28(11)\n\t" \
2605  "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2606  "lwz 11,0(11)\n\t" /* target->r11 */ \
2607  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2608  VALGRIND_RESTORE_STACK \
2609  "mr %0,3" \
2610  : /*out*/ "=r" (_res) \
2611  : /*in*/ "r" (&_argvec[0]) \
2612  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2613  ); \
2614  lval = (__typeof__(lval)) _res; \
2615  } while (0)
2616 
2617 #endif /* PLAT_ppc32_linux */
2618 
2619 /* ------------------------ ppc64-linux ------------------------ */
2620 
2621 #if defined(PLAT_ppc64be_linux)
2622 
2623 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2624 
2625 /* These regs are trashed by the hidden call. */
/* ppc64be: clobber list for the hidden call — identical register set
   to the ppc32 variant (volatile GPRs, lr/ctr/xer, all cr fields). */
2626 #define __CALLER_SAVED_REGS \
2627  "lr", "ctr", "xer", \
2628  "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2629  "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2630  "r11", "r12", "r13"
2631 
2632 /* Macros to save and align the stack before making a function
2633  call and restore it afterwards as gcc may not keep the stack
2634  pointer aligned if it doesn't realize calls are being made
2635  to other functions. */
2636 
/* ppc64be: save r1 in r28, then clear its low 4 bits (rldicr keeps
   bits 0..59) for 16-byte alignment; RESTORE reverses it. */
2637 #define VALGRIND_ALIGN_STACK \
2638  "mr 28,1\n\t" \
2639  "rldicr 1,1,0,59\n\t"
2640 #define VALGRIND_RESTORE_STACK \
2641  "mr 1,28\n\t"
2642 
2643 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2644  long) == 8. */
2645 
/* ppc64be: 0-arg no-redirect call.  Layout relative to the asm input
   pointer (&_argvec[2]): 0(11)=target nraddr, -8(11)=callee's TOC
   pointer, -16(11)=slot used to save/restore the caller's r2 around
   the call (per the comments on each line). */
2646 #define CALL_FN_W_v(lval, orig) \
2647  do { \
2648  volatile OrigFn _orig = (orig); \
2649  volatile unsigned long _argvec[3+0]; \
2650  volatile unsigned long _res; \
2651  /* _argvec[0] holds current r2 across the call */ \
2652  _argvec[1] = (unsigned long)_orig.r2; \
2653  _argvec[2] = (unsigned long)_orig.nraddr; \
2654  __asm__ volatile( \
2655  VALGRIND_ALIGN_STACK \
2656  "mr 11,%1\n\t" \
2657  "std 2,-16(11)\n\t" /* save tocptr */ \
2658  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2659  "ld 11, 0(11)\n\t" /* target->r11 */ \
2660  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2661  "mr 11,%1\n\t" \
2662  "mr %0,3\n\t" \
2663  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2664  VALGRIND_RESTORE_STACK \
2665  : /*out*/ "=r" (_res) \
2666  : /*in*/ "r" (&_argvec[2]) \
2667  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2668  ); \
2669  lval = (__typeof__(lval)) _res; \
2670  } while (0)
2671 
/* ppc64be: 1-arg no-redirect call with TOC-pointer save/swap/restore;
   arg1 is at 8(11) (8-byte slots) and is loaded into r3. */
2672 #define CALL_FN_W_W(lval, orig, arg1) \
2673  do { \
2674  volatile OrigFn _orig = (orig); \
2675  volatile unsigned long _argvec[3+1]; \
2676  volatile unsigned long _res; \
2677  /* _argvec[0] holds current r2 across the call */ \
2678  _argvec[1] = (unsigned long)_orig.r2; \
2679  _argvec[2] = (unsigned long)_orig.nraddr; \
2680  _argvec[2+1] = (unsigned long)arg1; \
2681  __asm__ volatile( \
2682  VALGRIND_ALIGN_STACK \
2683  "mr 11,%1\n\t" \
2684  "std 2,-16(11)\n\t" /* save tocptr */ \
2685  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2686  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2687  "ld 11, 0(11)\n\t" /* target->r11 */ \
2688  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2689  "mr 11,%1\n\t" \
2690  "mr %0,3\n\t" \
2691  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2692  VALGRIND_RESTORE_STACK \
2693  : /*out*/ "=r" (_res) \
2694  : /*in*/ "r" (&_argvec[2]) \
2695  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2696  ); \
2697  lval = (__typeof__(lval)) _res; \
2698  } while (0)
2699 
/* As CALL_FN_W_v, but passing two word-sized arguments in r3-r4. */
2700 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2701  do { \
2702  volatile OrigFn _orig = (orig); \
2703  volatile unsigned long _argvec[3+2]; \
2704  volatile unsigned long _res; \
2705  /* _argvec[0] holds current r2 across the call */ \
2706  _argvec[1] = (unsigned long)_orig.r2; \
2707  _argvec[2] = (unsigned long)_orig.nraddr; \
2708  _argvec[2+1] = (unsigned long)arg1; \
2709  _argvec[2+2] = (unsigned long)arg2; \
2710  __asm__ volatile( \
2711  VALGRIND_ALIGN_STACK \
2712  "mr 11,%1\n\t" \
2713  "std 2,-16(11)\n\t" /* save tocptr */ \
2714  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2715  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2716  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2717  "ld 11, 0(11)\n\t" /* target->r11 */ \
2718  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2719  "mr 11,%1\n\t" \
2720  "mr %0,3\n\t" \
2721  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2722  VALGRIND_RESTORE_STACK \
2723  : /*out*/ "=r" (_res) \
2724  : /*in*/ "r" (&_argvec[2]) \
2725  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2726  ); \
2727  lval = (__typeof__(lval)) _res; \
2728  } while (0)
2729 
/* As CALL_FN_W_v, but passing three word-sized arguments in r3-r5. */
2730 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2731  do { \
2732  volatile OrigFn _orig = (orig); \
2733  volatile unsigned long _argvec[3+3]; \
2734  volatile unsigned long _res; \
2735  /* _argvec[0] holds current r2 across the call */ \
2736  _argvec[1] = (unsigned long)_orig.r2; \
2737  _argvec[2] = (unsigned long)_orig.nraddr; \
2738  _argvec[2+1] = (unsigned long)arg1; \
2739  _argvec[2+2] = (unsigned long)arg2; \
2740  _argvec[2+3] = (unsigned long)arg3; \
2741  __asm__ volatile( \
2742  VALGRIND_ALIGN_STACK \
2743  "mr 11,%1\n\t" \
2744  "std 2,-16(11)\n\t" /* save tocptr */ \
2745  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2746  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2747  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2748  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2749  "ld 11, 0(11)\n\t" /* target->r11 */ \
2750  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2751  "mr 11,%1\n\t" \
2752  "mr %0,3\n\t" \
2753  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2754  VALGRIND_RESTORE_STACK \
2755  : /*out*/ "=r" (_res) \
2756  : /*in*/ "r" (&_argvec[2]) \
2757  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2758  ); \
2759  lval = (__typeof__(lval)) _res; \
2760  } while (0)
2761 
/* As CALL_FN_W_v, but passing four word-sized arguments in r3-r6. */
2762 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2763  do { \
2764  volatile OrigFn _orig = (orig); \
2765  volatile unsigned long _argvec[3+4]; \
2766  volatile unsigned long _res; \
2767  /* _argvec[0] holds current r2 across the call */ \
2768  _argvec[1] = (unsigned long)_orig.r2; \
2769  _argvec[2] = (unsigned long)_orig.nraddr; \
2770  _argvec[2+1] = (unsigned long)arg1; \
2771  _argvec[2+2] = (unsigned long)arg2; \
2772  _argvec[2+3] = (unsigned long)arg3; \
2773  _argvec[2+4] = (unsigned long)arg4; \
2774  __asm__ volatile( \
2775  VALGRIND_ALIGN_STACK \
2776  "mr 11,%1\n\t" \
2777  "std 2,-16(11)\n\t" /* save tocptr */ \
2778  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2779  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2780  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2781  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2782  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2783  "ld 11, 0(11)\n\t" /* target->r11 */ \
2784  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2785  "mr 11,%1\n\t" \
2786  "mr %0,3\n\t" \
2787  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2788  VALGRIND_RESTORE_STACK \
2789  : /*out*/ "=r" (_res) \
2790  : /*in*/ "r" (&_argvec[2]) \
2791  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2792  ); \
2793  lval = (__typeof__(lval)) _res; \
2794  } while (0)
2795 
/* As CALL_FN_W_v, but passing five word-sized arguments in r3-r7. */
2796 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2797  do { \
2798  volatile OrigFn _orig = (orig); \
2799  volatile unsigned long _argvec[3+5]; \
2800  volatile unsigned long _res; \
2801  /* _argvec[0] holds current r2 across the call */ \
2802  _argvec[1] = (unsigned long)_orig.r2; \
2803  _argvec[2] = (unsigned long)_orig.nraddr; \
2804  _argvec[2+1] = (unsigned long)arg1; \
2805  _argvec[2+2] = (unsigned long)arg2; \
2806  _argvec[2+3] = (unsigned long)arg3; \
2807  _argvec[2+4] = (unsigned long)arg4; \
2808  _argvec[2+5] = (unsigned long)arg5; \
2809  __asm__ volatile( \
2810  VALGRIND_ALIGN_STACK \
2811  "mr 11,%1\n\t" \
2812  "std 2,-16(11)\n\t" /* save tocptr */ \
2813  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2814  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2815  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2816  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2817  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2818  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2819  "ld 11, 0(11)\n\t" /* target->r11 */ \
2820  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2821  "mr 11,%1\n\t" \
2822  "mr %0,3\n\t" \
2823  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2824  VALGRIND_RESTORE_STACK \
2825  : /*out*/ "=r" (_res) \
2826  : /*in*/ "r" (&_argvec[2]) \
2827  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2828  ); \
2829  lval = (__typeof__(lval)) _res; \
2830  } while (0)
2831 
/* As CALL_FN_W_v, but passing six word-sized arguments in r3-r8. */
2832 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2833  do { \
2834  volatile OrigFn _orig = (orig); \
2835  volatile unsigned long _argvec[3+6]; \
2836  volatile unsigned long _res; \
2837  /* _argvec[0] holds current r2 across the call */ \
2838  _argvec[1] = (unsigned long)_orig.r2; \
2839  _argvec[2] = (unsigned long)_orig.nraddr; \
2840  _argvec[2+1] = (unsigned long)arg1; \
2841  _argvec[2+2] = (unsigned long)arg2; \
2842  _argvec[2+3] = (unsigned long)arg3; \
2843  _argvec[2+4] = (unsigned long)arg4; \
2844  _argvec[2+5] = (unsigned long)arg5; \
2845  _argvec[2+6] = (unsigned long)arg6; \
2846  __asm__ volatile( \
2847  VALGRIND_ALIGN_STACK \
2848  "mr 11,%1\n\t" \
2849  "std 2,-16(11)\n\t" /* save tocptr */ \
2850  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2851  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2852  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2853  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2854  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2855  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2856  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2857  "ld 11, 0(11)\n\t" /* target->r11 */ \
2858  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2859  "mr 11,%1\n\t" \
2860  "mr %0,3\n\t" \
2861  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2862  VALGRIND_RESTORE_STACK \
2863  : /*out*/ "=r" (_res) \
2864  : /*in*/ "r" (&_argvec[2]) \
2865  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2866  ); \
2867  lval = (__typeof__(lval)) _res; \
2868  } while (0)
2869 
/* As CALL_FN_W_v, but passing seven word-sized arguments in r3-r9. */
2870 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2871  arg7) \
2872  do { \
2873  volatile OrigFn _orig = (orig); \
2874  volatile unsigned long _argvec[3+7]; \
2875  volatile unsigned long _res; \
2876  /* _argvec[0] holds current r2 across the call */ \
2877  _argvec[1] = (unsigned long)_orig.r2; \
2878  _argvec[2] = (unsigned long)_orig.nraddr; \
2879  _argvec[2+1] = (unsigned long)arg1; \
2880  _argvec[2+2] = (unsigned long)arg2; \
2881  _argvec[2+3] = (unsigned long)arg3; \
2882  _argvec[2+4] = (unsigned long)arg4; \
2883  _argvec[2+5] = (unsigned long)arg5; \
2884  _argvec[2+6] = (unsigned long)arg6; \
2885  _argvec[2+7] = (unsigned long)arg7; \
2886  __asm__ volatile( \
2887  VALGRIND_ALIGN_STACK \
2888  "mr 11,%1\n\t" \
2889  "std 2,-16(11)\n\t" /* save tocptr */ \
2890  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2891  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2892  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2893  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2894  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2895  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2896  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2897  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2898  "ld 11, 0(11)\n\t" /* target->r11 */ \
2899  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2900  "mr 11,%1\n\t" \
2901  "mr %0,3\n\t" \
2902  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2903  VALGRIND_RESTORE_STACK \
2904  : /*out*/ "=r" (_res) \
2905  : /*in*/ "r" (&_argvec[2]) \
2906  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2907  ); \
2908  lval = (__typeof__(lval)) _res; \
2909  } while (0)
2910 
/* As CALL_FN_W_v, but passing eight word-sized arguments in r3-r10
   (the full set of PPC64 integer argument registers used here). */
2911 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2912  arg7,arg8) \
2913  do { \
2914  volatile OrigFn _orig = (orig); \
2915  volatile unsigned long _argvec[3+8]; \
2916  volatile unsigned long _res; \
2917  /* _argvec[0] holds current r2 across the call */ \
2918  _argvec[1] = (unsigned long)_orig.r2; \
2919  _argvec[2] = (unsigned long)_orig.nraddr; \
2920  _argvec[2+1] = (unsigned long)arg1; \
2921  _argvec[2+2] = (unsigned long)arg2; \
2922  _argvec[2+3] = (unsigned long)arg3; \
2923  _argvec[2+4] = (unsigned long)arg4; \
2924  _argvec[2+5] = (unsigned long)arg5; \
2925  _argvec[2+6] = (unsigned long)arg6; \
2926  _argvec[2+7] = (unsigned long)arg7; \
2927  _argvec[2+8] = (unsigned long)arg8; \
2928  __asm__ volatile( \
2929  VALGRIND_ALIGN_STACK \
2930  "mr 11,%1\n\t" \
2931  "std 2,-16(11)\n\t" /* save tocptr */ \
2932  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2933  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2934  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2935  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2936  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2937  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2938  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2939  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2940  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2941  "ld 11, 0(11)\n\t" /* target->r11 */ \
2942  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2943  "mr 11,%1\n\t" \
2944  "mr %0,3\n\t" \
2945  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2946  VALGRIND_RESTORE_STACK \
2947  : /*out*/ "=r" (_res) \
2948  : /*in*/ "r" (&_argvec[2]) \
2949  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2950  ); \
2951  lval = (__typeof__(lval)) _res; \
2952  } while (0)
2953 
/* As CALL_FN_W_8W, but with a ninth argument that no longer fits in a
   register: the stack frame is temporarily extended by 128 bytes and
   arg9 is stored at 112(r1) before the call. */
2954 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2955  arg7,arg8,arg9) \
2956  do { \
2957  volatile OrigFn _orig = (orig); \
2958  volatile unsigned long _argvec[3+9]; \
2959  volatile unsigned long _res; \
2960  /* _argvec[0] holds current r2 across the call */ \
2961  _argvec[1] = (unsigned long)_orig.r2; \
2962  _argvec[2] = (unsigned long)_orig.nraddr; \
2963  _argvec[2+1] = (unsigned long)arg1; \
2964  _argvec[2+2] = (unsigned long)arg2; \
2965  _argvec[2+3] = (unsigned long)arg3; \
2966  _argvec[2+4] = (unsigned long)arg4; \
2967  _argvec[2+5] = (unsigned long)arg5; \
2968  _argvec[2+6] = (unsigned long)arg6; \
2969  _argvec[2+7] = (unsigned long)arg7; \
2970  _argvec[2+8] = (unsigned long)arg8; \
2971  _argvec[2+9] = (unsigned long)arg9; \
2972  __asm__ volatile( \
2973  VALGRIND_ALIGN_STACK \
2974  "mr 11,%1\n\t" \
2975  "std 2,-16(11)\n\t" /* save tocptr */ \
2976  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2977  "addi 1,1,-128\n\t" /* expand stack frame */ \
2978  /* arg9 */ \
2979  "ld 3,72(11)\n\t" \
2980  "std 3,112(1)\n\t" \
2981  /* args1-8 */ \
2982  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2983  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2984  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2985  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2986  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2987  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2988  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2989  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2990  "ld 11, 0(11)\n\t" /* target->r11 */ \
2991  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2992  "mr 11,%1\n\t" \
2993  "mr %0,3\n\t" \
2994  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2995  VALGRIND_RESTORE_STACK \
2996  : /*out*/ "=r" (_res) \
2997  : /*in*/ "r" (&_argvec[2]) \
2998  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2999  ); \
3000  lval = (__typeof__(lval)) _res; \
3001  } while (0)
3002 
/* As CALL_FN_W_8W, but with two stack arguments: the frame is extended
   by 128 bytes and arg9/arg10 are stored at 112(r1)/120(r1). */
3003 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3004  arg7,arg8,arg9,arg10) \
3005  do { \
3006  volatile OrigFn _orig = (orig); \
3007  volatile unsigned long _argvec[3+10]; \
3008  volatile unsigned long _res; \
3009  /* _argvec[0] holds current r2 across the call */ \
3010  _argvec[1] = (unsigned long)_orig.r2; \
3011  _argvec[2] = (unsigned long)_orig.nraddr; \
3012  _argvec[2+1] = (unsigned long)arg1; \
3013  _argvec[2+2] = (unsigned long)arg2; \
3014  _argvec[2+3] = (unsigned long)arg3; \
3015  _argvec[2+4] = (unsigned long)arg4; \
3016  _argvec[2+5] = (unsigned long)arg5; \
3017  _argvec[2+6] = (unsigned long)arg6; \
3018  _argvec[2+7] = (unsigned long)arg7; \
3019  _argvec[2+8] = (unsigned long)arg8; \
3020  _argvec[2+9] = (unsigned long)arg9; \
3021  _argvec[2+10] = (unsigned long)arg10; \
3022  __asm__ volatile( \
3023  VALGRIND_ALIGN_STACK \
3024  "mr 11,%1\n\t" \
3025  "std 2,-16(11)\n\t" /* save tocptr */ \
3026  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3027  "addi 1,1,-128\n\t" /* expand stack frame */ \
3028  /* arg10 */ \
3029  "ld 3,80(11)\n\t" \
3030  "std 3,120(1)\n\t" \
3031  /* arg9 */ \
3032  "ld 3,72(11)\n\t" \
3033  "std 3,112(1)\n\t" \
3034  /* args1-8 */ \
3035  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3036  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3037  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3038  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3039  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3040  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3041  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3042  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3043  "ld 11, 0(11)\n\t" /* target->r11 */ \
3044  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3045  "mr 11,%1\n\t" \
3046  "mr %0,3\n\t" \
3047  "ld 2,-16(11)\n\t" /* restore tocptr */ \
3048  VALGRIND_RESTORE_STACK \
3049  : /*out*/ "=r" (_res) \
3050  : /*in*/ "r" (&_argvec[2]) \
3051  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3052  ); \
3053  lval = (__typeof__(lval)) _res; \
3054  } while (0)
3055 
/* As CALL_FN_W_8W, but with three stack arguments: the frame is
   extended by 144 bytes and arg9..arg11 go to 112/120/128(r1). */
3056 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3057  arg7,arg8,arg9,arg10,arg11) \
3058  do { \
3059  volatile OrigFn _orig = (orig); \
3060  volatile unsigned long _argvec[3+11]; \
3061  volatile unsigned long _res; \
3062  /* _argvec[0] holds current r2 across the call */ \
3063  _argvec[1] = (unsigned long)_orig.r2; \
3064  _argvec[2] = (unsigned long)_orig.nraddr; \
3065  _argvec[2+1] = (unsigned long)arg1; \
3066  _argvec[2+2] = (unsigned long)arg2; \
3067  _argvec[2+3] = (unsigned long)arg3; \
3068  _argvec[2+4] = (unsigned long)arg4; \
3069  _argvec[2+5] = (unsigned long)arg5; \
3070  _argvec[2+6] = (unsigned long)arg6; \
3071  _argvec[2+7] = (unsigned long)arg7; \
3072  _argvec[2+8] = (unsigned long)arg8; \
3073  _argvec[2+9] = (unsigned long)arg9; \
3074  _argvec[2+10] = (unsigned long)arg10; \
3075  _argvec[2+11] = (unsigned long)arg11; \
3076  __asm__ volatile( \
3077  VALGRIND_ALIGN_STACK \
3078  "mr 11,%1\n\t" \
3079  "std 2,-16(11)\n\t" /* save tocptr */ \
3080  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3081  "addi 1,1,-144\n\t" /* expand stack frame */ \
3082  /* arg11 */ \
3083  "ld 3,88(11)\n\t" \
3084  "std 3,128(1)\n\t" \
3085  /* arg10 */ \
3086  "ld 3,80(11)\n\t" \
3087  "std 3,120(1)\n\t" \
3088  /* arg9 */ \
3089  "ld 3,72(11)\n\t" \
3090  "std 3,112(1)\n\t" \
3091  /* args1-8 */ \
3092  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3093  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3094  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3095  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3096  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3097  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3098  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3099  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3100  "ld 11, 0(11)\n\t" /* target->r11 */ \
3101  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3102  "mr 11,%1\n\t" \
3103  "mr %0,3\n\t" \
3104  "ld 2,-16(11)\n\t" /* restore tocptr */ \
3105  VALGRIND_RESTORE_STACK \
3106  : /*out*/ "=r" (_res) \
3107  : /*in*/ "r" (&_argvec[2]) \
3108  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3109  ); \
3110  lval = (__typeof__(lval)) _res; \
3111  } while (0)
3112 
/* As CALL_FN_W_8W, but with four stack arguments: the frame is
   extended by 144 bytes and arg9..arg12 go to 112/120/128/136(r1). */
3113 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3114  arg7,arg8,arg9,arg10,arg11,arg12) \
3115  do { \
3116  volatile OrigFn _orig = (orig); \
3117  volatile unsigned long _argvec[3+12]; \
3118  volatile unsigned long _res; \
3119  /* _argvec[0] holds current r2 across the call */ \
3120  _argvec[1] = (unsigned long)_orig.r2; \
3121  _argvec[2] = (unsigned long)_orig.nraddr; \
3122  _argvec[2+1] = (unsigned long)arg1; \
3123  _argvec[2+2] = (unsigned long)arg2; \
3124  _argvec[2+3] = (unsigned long)arg3; \
3125  _argvec[2+4] = (unsigned long)arg4; \
3126  _argvec[2+5] = (unsigned long)arg5; \
3127  _argvec[2+6] = (unsigned long)arg6; \
3128  _argvec[2+7] = (unsigned long)arg7; \
3129  _argvec[2+8] = (unsigned long)arg8; \
3130  _argvec[2+9] = (unsigned long)arg9; \
3131  _argvec[2+10] = (unsigned long)arg10; \
3132  _argvec[2+11] = (unsigned long)arg11; \
3133  _argvec[2+12] = (unsigned long)arg12; \
3134  __asm__ volatile( \
3135  VALGRIND_ALIGN_STACK \
3136  "mr 11,%1\n\t" \
3137  "std 2,-16(11)\n\t" /* save tocptr */ \
3138  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3139  "addi 1,1,-144\n\t" /* expand stack frame */ \
3140  /* arg12 */ \
3141  "ld 3,96(11)\n\t" \
3142  "std 3,136(1)\n\t" \
3143  /* arg11 */ \
3144  "ld 3,88(11)\n\t" \
3145  "std 3,128(1)\n\t" \
3146  /* arg10 */ \
3147  "ld 3,80(11)\n\t" \
3148  "std 3,120(1)\n\t" \
3149  /* arg9 */ \
3150  "ld 3,72(11)\n\t" \
3151  "std 3,112(1)\n\t" \
3152  /* args1-8 */ \
3153  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3154  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3155  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3156  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3157  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3158  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3159  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3160  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3161  "ld 11, 0(11)\n\t" /* target->r11 */ \
3162  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3163  "mr 11,%1\n\t" \
3164  "mr %0,3\n\t" \
3165  "ld 2,-16(11)\n\t" /* restore tocptr */ \
3166  VALGRIND_RESTORE_STACK \
3167  : /*out*/ "=r" (_res) \
3168  : /*in*/ "r" (&_argvec[2]) \
3169  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3170  ); \
3171  lval = (__typeof__(lval)) _res; \
3172  } while (0)
3173 
3174 #endif /* PLAT_ppc64be_linux */
3175 
3176 /* ------------------------- ppc64le-linux ----------------------- */
3177 #if defined(PLAT_ppc64le_linux)
3178 
3179 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
3180 
3181 /* These regs are trashed by the hidden call. */
/* Register names expanded into the clobber list of every CALL_FN_
   asm below: condition/link/count/XER state plus the integer
   registers the hidden call may overwrite. */
3182 #define __CALLER_SAVED_REGS \
3183  "lr", "ctr", "xer", \
3184  "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
3185  "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
3186  "r11", "r12", "r13"
3187 
3188 /* Macros to save and align the stack before making a function
3189  call and restore it afterwards as gcc may not keep the stack
3190  pointer aligned if it doesn't realize calls are being made
3191  to other functions. */
3192 
/* Save the current stack pointer in r28 (hence "r28" in the clobber
   lists), then round r1 down to a 16-byte boundary: rldicr 1,1,0,59
   keeps bits 0..59 and clears the low 4 bits. */
3193 #define VALGRIND_ALIGN_STACK \
3194  "mr 28,1\n\t" \
3195  "rldicr 1,1,0,59\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the stack pointer saved in r28. */
3196 #define VALGRIND_RESTORE_STACK \
3197  "mr 1,28\n\t"
3198 
3199 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
3200  long) == 8. */
3201 
/* ppc64le: call the function described by ORIG (no arguments) via the
   no-redirect branch, with the target address in r12; the word result
   (r3) is assigned to LVAL.  The caller's TOC pointer (r2) is parked
   in _argvec[0] across the call and restored afterwards. */
3202 #define CALL_FN_W_v(lval, orig) \
3203  do { \
3204  volatile OrigFn _orig = (orig); \
3205  volatile unsigned long _argvec[3+0]; \
3206  volatile unsigned long _res; \
3207  /* _argvec[0] holds current r2 across the call */ \
3208  _argvec[1] = (unsigned long)_orig.r2; \
3209  _argvec[2] = (unsigned long)_orig.nraddr; \
3210  __asm__ volatile( \
3211  VALGRIND_ALIGN_STACK \
3212  "mr 12,%1\n\t" \
3213  "std 2,-16(12)\n\t" /* save tocptr */ \
3214  "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3215  "ld 12, 0(12)\n\t" /* target->r12 */ \
3216  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3217  "mr 12,%1\n\t" \
3218  "mr %0,3\n\t" \
3219  "ld 2,-16(12)\n\t" /* restore tocptr */ \
3220  VALGRIND_RESTORE_STACK \
3221  : /*out*/ "=r" (_res) \
3222  : /*in*/ "r" (&_argvec[2]) \
3223  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3224  ); \
3225  lval = (__typeof__(lval)) _res; \
3226  } while (0)
3227 
/* ppc64le: as CALL_FN_W_v, but passing one word-sized argument in r3. */
3228 #define CALL_FN_W_W(lval, orig, arg1) \
3229  do { \
3230  volatile OrigFn _orig = (orig); \
3231  volatile unsigned long _argvec[3+1]; \
3232  volatile unsigned long _res; \
3233  /* _argvec[0] holds current r2 across the call */ \
3234  _argvec[1] = (unsigned long)_orig.r2; \
3235  _argvec[2] = (unsigned long)_orig.nraddr; \
3236  _argvec[2+1] = (unsigned long)arg1; \
3237  __asm__ volatile( \
3238  VALGRIND_ALIGN_STACK \
3239  "mr 12,%1\n\t" \
3240  "std 2,-16(12)\n\t" /* save tocptr */ \
3241  "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3242  "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3243  "ld 12, 0(12)\n\t" /* target->r12 */ \
3244  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3245  "mr 12,%1\n\t" \
3246  "mr %0,3\n\t" \
3247  "ld 2,-16(12)\n\t" /* restore tocptr */ \
3248  VALGRIND_RESTORE_STACK \
3249  : /*out*/ "=r" (_res) \
3250  : /*in*/ "r" (&_argvec[2]) \
3251  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3252  ); \
3253  lval = (__typeof__(lval)) _res; \
3254  } while (0)
3255 
/* ppc64le: as CALL_FN_W_v, but passing two word-sized arguments in r3-r4. */
3256 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3257  do { \
3258  volatile OrigFn _orig = (orig); \
3259  volatile unsigned long _argvec[3+2]; \
3260  volatile unsigned long _res; \
3261  /* _argvec[0] holds current r2 across the call */ \
3262  _argvec[1] = (unsigned long)_orig.r2; \
3263  _argvec[2] = (unsigned long)_orig.nraddr; \
3264  _argvec[2+1] = (unsigned long)arg1; \
3265  _argvec[2+2] = (unsigned long)arg2; \
3266  __asm__ volatile( \
3267  VALGRIND_ALIGN_STACK \
3268  "mr 12,%1\n\t" \
3269  "std 2,-16(12)\n\t" /* save tocptr */ \
3270  "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3271  "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3272  "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3273  "ld 12, 0(12)\n\t" /* target->r12 */ \
3274  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3275  "mr 12,%1\n\t" \
3276  "mr %0,3\n\t" \
3277  "ld 2,-16(12)\n\t" /* restore tocptr */ \
3278  VALGRIND_RESTORE_STACK \
3279  : /*out*/ "=r" (_res) \
3280  : /*in*/ "r" (&_argvec[2]) \
3281  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3282  ); \
3283  lval = (__typeof__(lval)) _res; \
3284  } while (0)
3285 
/* ppc64le: as CALL_FN_W_v, but passing three word-sized arguments in r3-r5. */
3286 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3287  do { \
3288  volatile OrigFn _orig = (orig); \
3289  volatile unsigned long _argvec[3+3]; \
3290  volatile unsigned long _res; \
3291  /* _argvec[0] holds current r2 across the call */ \
3292  _argvec[1] = (unsigned long)_orig.r2; \
3293  _argvec[2] = (unsigned long)_orig.nraddr; \
3294  _argvec[2+1] = (unsigned long)arg1; \
3295  _argvec[2+2] = (unsigned long)arg2; \
3296  _argvec[2+3] = (unsigned long)arg3; \
3297  __asm__ volatile( \
3298  VALGRIND_ALIGN_STACK \
3299  "mr 12,%1\n\t" \
3300  "std 2,-16(12)\n\t" /* save tocptr */ \
3301  "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3302  "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3303  "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3304  "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3305  "ld 12, 0(12)\n\t" /* target->r12 */ \
3306  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3307  "mr 12,%1\n\t" \
3308  "mr %0,3\n\t" \
3309  "ld 2,-16(12)\n\t" /* restore tocptr */ \
3310  VALGRIND_RESTORE_STACK \
3311  : /*out*/ "=r" (_res) \
3312  : /*in*/ "r" (&_argvec[2]) \
3313  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3314  ); \
3315  lval = (__typeof__(lval)) _res; \
3316  } while (0)
3317 
/* ppc64le: as CALL_FN_W_v, but passing four word-sized arguments in r3-r6. */
3318 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3319  do { \
3320  volatile OrigFn _orig = (orig); \
3321  volatile unsigned long _argvec[3+4]; \
3322  volatile unsigned long _res; \
3323  /* _argvec[0] holds current r2 across the call */ \
3324  _argvec[1] = (unsigned long)_orig.r2; \
3325  _argvec[2] = (unsigned long)_orig.nraddr; \
3326  _argvec[2+1] = (unsigned long)arg1; \
3327  _argvec[2+2] = (unsigned long)arg2; \
3328  _argvec[2+3] = (unsigned long)arg3; \
3329  _argvec[2+4] = (unsigned long)arg4; \
3330  __asm__ volatile( \
3331  VALGRIND_ALIGN_STACK \
3332  "mr 12,%1\n\t" \
3333  "std 2,-16(12)\n\t" /* save tocptr */ \
3334  "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3335  "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3336  "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3337  "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3338  "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3339  "ld 12, 0(12)\n\t" /* target->r12 */ \
3340  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3341  "mr 12,%1\n\t" \
3342  "mr %0,3\n\t" \
3343  "ld 2,-16(12)\n\t" /* restore tocptr */ \
3344  VALGRIND_RESTORE_STACK \
3345  : /*out*/ "=r" (_res) \
3346  : /*in*/ "r" (&_argvec[2]) \
3347  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3348  ); \
3349  lval = (__typeof__(lval)) _res; \
3350  } while (0)
3351 
/* ppc64le: as CALL_FN_W_v, but passing five word-sized arguments in r3-r7. */
3352 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3353  do { \
3354  volatile OrigFn _orig = (orig); \
3355  volatile unsigned long _argvec[3+5]; \
3356  volatile unsigned long _res; \
3357  /* _argvec[0] holds current r2 across the call */ \
3358  _argvec[1] = (unsigned long)_orig.r2; \
3359  _argvec[2] = (unsigned long)_orig.nraddr; \
3360  _argvec[2+1] = (unsigned long)arg1; \
3361  _argvec[2+2] = (unsigned long)arg2; \
3362  _argvec[2+3] = (unsigned long)arg3; \
3363  _argvec[2+4] = (unsigned long)arg4; \
3364  _argvec[2+5] = (unsigned long)arg5; \
3365  __asm__ volatile( \
3366  VALGRIND_ALIGN_STACK \
3367  "mr 12,%1\n\t" \
3368  "std 2,-16(12)\n\t" /* save tocptr */ \
3369  "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3370  "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3371  "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3372  "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3373  "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3374  "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3375  "ld 12, 0(12)\n\t" /* target->r12 */ \
3376  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3377  "mr 12,%1\n\t" \
3378  "mr %0,3\n\t" \
3379  "ld 2,-16(12)\n\t" /* restore tocptr */ \
3380  VALGRIND_RESTORE_STACK \
3381  : /*out*/ "=r" (_res) \
3382  : /*in*/ "r" (&_argvec[2]) \
3383  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3384  ); \
3385  lval = (__typeof__(lval)) _res; \
3386  } while (0)
3387 
/* ppc64le: as CALL_FN_W_v, but passing six word-sized arguments in r3-r8. */
3388 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3389  do { \
3390  volatile OrigFn _orig = (orig); \
3391  volatile unsigned long _argvec[3+6]; \
3392  volatile unsigned long _res; \
3393  /* _argvec[0] holds current r2 across the call */ \
3394  _argvec[1] = (unsigned long)_orig.r2; \
3395  _argvec[2] = (unsigned long)_orig.nraddr; \
3396  _argvec[2+1] = (unsigned long)arg1; \
3397  _argvec[2+2] = (unsigned long)arg2; \
3398  _argvec[2+3] = (unsigned long)arg3; \
3399  _argvec[2+4] = (unsigned long)arg4; \
3400  _argvec[2+5] = (unsigned long)arg5; \
3401  _argvec[2+6] = (unsigned long)arg6; \
3402  __asm__ volatile( \
3403  VALGRIND_ALIGN_STACK \
3404  "mr 12,%1\n\t" \
3405  "std 2,-16(12)\n\t" /* save tocptr */ \
3406  "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3407  "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3408  "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3409  "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3410  "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3411  "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3412  "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3413  "ld 12, 0(12)\n\t" /* target->r12 */ \
3414  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3415  "mr 12,%1\n\t" \
3416  "mr %0,3\n\t" \
3417  "ld 2,-16(12)\n\t" /* restore tocptr */ \
3418  VALGRIND_RESTORE_STACK \
3419  : /*out*/ "=r" (_res) \
3420  : /*in*/ "r" (&_argvec[2]) \
3421  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3422  ); \
3423  lval = (__typeof__(lval)) _res; \
3424  } while (0)
3425 
/* ppc64le: as CALL_FN_W_v, but passing seven word-sized arguments in r3-r9. */
3426 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3427  arg7) \
3428  do { \
3429  volatile OrigFn _orig = (orig); \
3430  volatile unsigned long _argvec[3+7]; \
3431  volatile unsigned long _res; \
3432  /* _argvec[0] holds current r2 across the call */ \
3433  _argvec[1] = (unsigned long)_orig.r2; \
3434  _argvec[2] = (unsigned long)_orig.nraddr; \
3435  _argvec[2+1] = (unsigned long)arg1; \
3436  _argvec[2+2] = (unsigned long)arg2; \
3437  _argvec[2+3] = (unsigned long)arg3; \
3438  _argvec[2+4] = (unsigned long)arg4; \
3439  _argvec[2+5] = (unsigned long)arg5; \
3440  _argvec[2+6] = (unsigned long)arg6; \
3441  _argvec[2+7] = (unsigned long)arg7; \
3442  __asm__ volatile( \
3443  VALGRIND_ALIGN_STACK \
3444  "mr 12,%1\n\t" \
3445  "std 2,-16(12)\n\t" /* save tocptr */ \
3446  "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3447  "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3448  "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3449  "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3450  "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3451  "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3452  "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3453  "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3454  "ld 12, 0(12)\n\t" /* target->r12 */ \
3455  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3456  "mr 12,%1\n\t" \
3457  "mr %0,3\n\t" \
3458  "ld 2,-16(12)\n\t" /* restore tocptr */ \
3459  VALGRIND_RESTORE_STACK \
3460  : /*out*/ "=r" (_res) \
3461  : /*in*/ "r" (&_argvec[2]) \
3462  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3463  ); \
3464  lval = (__typeof__(lval)) _res; \
3465  } while (0)
3466 
/* ppc64le: as CALL_FN_W_v, but passing eight word-sized arguments in
   r3-r10 (the full set of integer argument registers used here). */
3467 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3468  arg7,arg8) \
3469  do { \
3470  volatile OrigFn _orig = (orig); \
3471  volatile unsigned long _argvec[3+8]; \
3472  volatile unsigned long _res; \
3473  /* _argvec[0] holds current r2 across the call */ \
3474  _argvec[1] = (unsigned long)_orig.r2; \
3475  _argvec[2] = (unsigned long)_orig.nraddr; \
3476  _argvec[2+1] = (unsigned long)arg1; \
3477  _argvec[2+2] = (unsigned long)arg2; \
3478  _argvec[2+3] = (unsigned long)arg3; \
3479  _argvec[2+4] = (unsigned long)arg4; \
3480  _argvec[2+5] = (unsigned long)arg5; \
3481  _argvec[2+6] = (unsigned long)arg6; \
3482  _argvec[2+7] = (unsigned long)arg7; \
3483  _argvec[2+8] = (unsigned long)arg8; \
3484  __asm__ volatile( \
3485  VALGRIND_ALIGN_STACK \
3486  "mr 12,%1\n\t" \
3487  "std 2,-16(12)\n\t" /* save tocptr */ \
3488  "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3489  "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3490  "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3491  "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3492  "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3493  "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3494  "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3495  "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3496  "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3497  "ld 12, 0(12)\n\t" /* target->r12 */ \
3498  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3499  "mr 12,%1\n\t" \
3500  "mr %0,3\n\t" \
3501  "ld 2,-16(12)\n\t" /* restore tocptr */ \
3502  VALGRIND_RESTORE_STACK \
3503  : /*out*/ "=r" (_res) \
3504  : /*in*/ "r" (&_argvec[2]) \
3505  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3506  ); \
3507  lval = (__typeof__(lval)) _res; \
3508  } while (0)
3509 
/* ppc64le: as CALL_FN_W_8W, but with a ninth argument passed on the
   stack: the frame is temporarily extended by 128 bytes and arg9 is
   stored at 96(r1) (note: a smaller offset than the ppc64be variant's
   112(r1) — presumably due to the ELFv2 frame layout; confirm against
   the ABI if changing). */
3510 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3511  arg7,arg8,arg9) \
3512  do { \
3513  volatile OrigFn _orig = (orig); \
3514  volatile unsigned long _argvec[3+9]; \
3515  volatile unsigned long _res; \
3516  /* _argvec[0] holds current r2 across the call */ \
3517  _argvec[1] = (unsigned long)_orig.r2; \
3518  _argvec[2] = (unsigned long)_orig.nraddr; \
3519  _argvec[2+1] = (unsigned long)arg1; \
3520  _argvec[2+2] = (unsigned long)arg2; \
3521  _argvec[2+3] = (unsigned long)arg3; \
3522  _argvec[2+4] = (unsigned long)arg4; \
3523  _argvec[2+5] = (unsigned long)arg5; \
3524  _argvec[2+6] = (unsigned long)arg6; \
3525  _argvec[2+7] = (unsigned long)arg7; \
3526  _argvec[2+8] = (unsigned long)arg8; \
3527  _argvec[2+9] = (unsigned long)arg9; \
3528  __asm__ volatile( \
3529  VALGRIND_ALIGN_STACK \
3530  "mr 12,%1\n\t" \
3531  "std 2,-16(12)\n\t" /* save tocptr */ \
3532  "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3533  "addi 1,1,-128\n\t" /* expand stack frame */ \
3534  /* arg9 */ \
3535  "ld 3,72(12)\n\t" \
3536  "std 3,96(1)\n\t" \
3537  /* args1-8 */ \
3538  "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3539  "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3540  "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3541  "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3542  "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3543  "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3544  "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3545  "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3546  "ld 12, 0(12)\n\t" /* target->r12 */ \
3547  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3548  "mr 12,%1\n\t" \
3549  "mr %0,3\n\t" \
3550  "ld 2,-16(12)\n\t" /* restore tocptr */ \
3551  VALGRIND_RESTORE_STACK \
3552  : /*out*/ "=r" (_res) \
3553  : /*in*/ "r" (&_argvec[2]) \
3554  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3555  ); \
3556  lval = (__typeof__(lval)) _res; \
3557  } while (0)
3558 
/* As CALL_FN_W_9W, but for 10 word-sized arguments: args 1-8 in r3-r10,
   arg9 spilled to 96(r1) and arg10 to 104(r1) in the 128-byte expanded
   frame.  NOTE(review): r3 is reused as a scratch register for the two
   stack spills before the real arg1 is loaded into it. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+10];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]    = (unsigned long)_orig.r2;                    \
      _argvec[2]    = (unsigned long)_orig.nraddr;                \
      _argvec[2+1]  = (unsigned long)arg1;                        \
      _argvec[2+2]  = (unsigned long)arg2;                        \
      _argvec[2+3]  = (unsigned long)arg3;                        \
      _argvec[2+4]  = (unsigned long)arg4;                        \
      _argvec[2+5]  = (unsigned long)arg5;                        \
      _argvec[2+6]  = (unsigned long)arg6;                        \
      _argvec[2+7]  = (unsigned long)arg7;                        \
      _argvec[2+8]  = (unsigned long)arg8;                        \
      _argvec[2+9]  = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(12)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(12)\n\t"  /* arg8->r10 */                     \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3611 
/* As CALL_FN_W_9W, but for 11 word-sized arguments: args 1-8 in r3-r10,
   args 9-11 spilled to 96/104/112(r1).  The frame expansion grows to 144
   bytes to cover the third stack slot. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]    = (unsigned long)_orig.r2;                    \
      _argvec[2]    = (unsigned long)_orig.nraddr;                \
      _argvec[2+1]  = (unsigned long)arg1;                        \
      _argvec[2+2]  = (unsigned long)arg2;                        \
      _argvec[2+3]  = (unsigned long)arg3;                        \
      _argvec[2+4]  = (unsigned long)arg4;                        \
      _argvec[2+5]  = (unsigned long)arg5;                        \
      _argvec[2+6]  = (unsigned long)arg6;                        \
      _argvec[2+7]  = (unsigned long)arg7;                        \
      _argvec[2+8]  = (unsigned long)arg8;                        \
      _argvec[2+9]  = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg11 */                                              \
         "ld 3,88(12)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(12)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(12)\n\t"  /* arg8->r10 */                     \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3668 
/* As CALL_FN_W_9W, but for 12 word-sized arguments: args 1-8 in r3-r10,
   args 9-12 spilled to 96/104/112/120(r1) within the 144-byte expanded
   frame. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,arg12)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+12];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]    = (unsigned long)_orig.r2;                    \
      _argvec[2]    = (unsigned long)_orig.nraddr;                \
      _argvec[2+1]  = (unsigned long)arg1;                        \
      _argvec[2+2]  = (unsigned long)arg2;                        \
      _argvec[2+3]  = (unsigned long)arg3;                        \
      _argvec[2+4]  = (unsigned long)arg4;                        \
      _argvec[2+5]  = (unsigned long)arg5;                        \
      _argvec[2+6]  = (unsigned long)arg6;                        \
      _argvec[2+7]  = (unsigned long)arg7;                        \
      _argvec[2+8]  = (unsigned long)arg8;                        \
      _argvec[2+9]  = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      _argvec[2+12] = (unsigned long)arg12;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg12 */                                              \
         "ld 3,96(12)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg11 */                                              \
         "ld 3,88(12)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(12)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(12)\n\t"  /* arg8->r10 */                     \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3729 
3730 #endif /* PLAT_ppc64le_linux */
3731 
3732 /* ------------------------- arm-linux ------------------------- */
3733 
#if defined(PLAT_arm_linux)

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4","r14"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realize calls are being made
   to other functions. */

/* This is a bit tricky.  We store the original stack pointer in r10
   as it is callee-saves.  gcc doesn't allow the use of r11 for some
   reason.  Also, we can't directly "bic" the stack pointer in thumb
   mode since r13 isn't an allowed register number in that context.
   So use r4 as a temporary, since that is about to get trashed
   anyway, just after each use of this macro.  Side effect is we need
   to be very careful about any future changes, since
   VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
#define VALGRIND_ALIGN_STACK               \
      "mov r10, sp\n\t"                    \
      "mov r4,  sp\n\t"                    \
      "bic r4,  r4, #7\n\t"                \
      "mov sp,  r4\n\t"
#define VALGRIND_RESTORE_STACK             \
      "mov sp,  r10\n\t"

/* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
   long) == 4.  %1 points at _argvec[0], so byte offsets 0/4/8/...
   index nraddr/arg1/arg2/...; args beyond the fourth are pushed on
   the stack as the ARM calling convention passes only r0-r3 in
   registers. */

/* Call a 0-argument function. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 1-argument function; arg1 goes in r0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 2-argument function; args go in r0-r1. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 3-argument function; args go in r0-r2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 4-argument function; args fill all of r0-r3. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 5-argument function; arg5 goes on the stack.  The extra
   "sub sp, sp, #4" plus the single push keeps sp 8-byte aligned. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 6-argument function; args 5-6 are pushed (8 bytes, so no
   alignment padding is needed). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 7-argument function; args 5-7 pushed, with 4 bytes of
   padding to keep sp 8-byte aligned. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call an 8-argument function; args 5-8 pushed (16 bytes). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "push {r0, r1, r2, r3} \n\t"                             \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 9-argument function; args 5-9 pushed, plus 4 bytes of
   padding for 8-byte sp alignment.  NOTE: r4 is used to stage arg9
   before being reloaded with the target address. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 10-argument function; arg10 pushed first, then args 5-9. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0]  = (unsigned long)_orig.nraddr;                  \
      _argvec[1]  = (unsigned long)(arg1);                        \
      _argvec[2]  = (unsigned long)(arg2);                        \
      _argvec[3]  = (unsigned long)(arg3);                        \
      _argvec[4]  = (unsigned long)(arg4);                        \
      _argvec[5]  = (unsigned long)(arg5);                        \
      _argvec[6]  = (unsigned long)(arg6);                        \
      _argvec[7]  = (unsigned long)(arg7);                        \
      _argvec[8]  = (unsigned long)(arg8);                        \
      _argvec[9]  = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call an 11-argument function; args 10-11 pushed first (after 4
   bytes of alignment padding), then args 5-9. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0]  = (unsigned long)_orig.nraddr;                  \
      _argvec[1]  = (unsigned long)(arg1);                        \
      _argvec[2]  = (unsigned long)(arg2);                        \
      _argvec[3]  = (unsigned long)(arg3);                        \
      _argvec[4]  = (unsigned long)(arg4);                        \
      _argvec[5]  = (unsigned long)(arg5);                        \
      _argvec[6]  = (unsigned long)(arg6);                        \
      _argvec[7]  = (unsigned long)(arg7);                        \
      _argvec[8]  = (unsigned long)(arg8);                        \
      _argvec[9]  = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 12-argument function; args 10-12 pushed first, then
   args 5-9. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0]  = (unsigned long)_orig.nraddr;                  \
      _argvec[1]  = (unsigned long)(arg1);                        \
      _argvec[2]  = (unsigned long)(arg2);                        \
      _argvec[3]  = (unsigned long)(arg3);                        \
      _argvec[4]  = (unsigned long)(arg4);                        \
      _argvec[5]  = (unsigned long)(arg5);                        \
      _argvec[6]  = (unsigned long)(arg6);                        \
      _argvec[7]  = (unsigned long)(arg7);                        \
      _argvec[8]  = (unsigned long)(arg8);                        \
      _argvec[9]  = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "ldr r2, [%1, #48] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#endif /* PLAT_arm_linux */
4190 
4191 /* ------------------------ arm64-linux ------------------------ */
4192 
#if defined(PLAT_arm64_linux)

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS \
     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9",   \
     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17",      \
     "x18", "x19", "x20", "x30",                                  \
     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",  \
     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17",      \
     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25",      \
     "v26", "v27", "v28", "v29", "v30", "v31"

/* x21 is callee-saved, so we can use it to save and restore SP around
   the hidden call. */
#define VALGRIND_ALIGN_STACK               \
      "mov x21, sp\n\t"                    \
      "bic sp, x21, #15\n\t"
#define VALGRIND_RESTORE_STACK             \
      "mov sp,  x21\n\t"

/* These CALL_FN_ macros assume that on arm64-linux,
   sizeof(unsigned long) == 8.  %1 points at _argvec[0], so byte
   offsets 0/8/16/... index nraddr/arg1/arg2/...; the first eight
   args go in x0-x7 and the target address goes in x8 for the
   no-redirect branch. */

/* Call a 0-argument function. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 1-argument function; arg1 goes in x0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 2-argument function; args go in x0-x1. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 3-argument function; args go in x0-x2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 4-argument function; args go in x0-x3. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 5-argument function; args go in x0-x4. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 6-argument function; args go in x0-x5. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call a 7-argument function; args go in x0-x6. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* Call an 8-argument function; args fill all of x0-x7. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4460 
/* arm64-linux: hidden call to a 9-argument function.
   Args 1-8 go in x0-x7; arg9 is spilled to the stack at [sp, #0].
   0x20 bytes are reserved — presumably to keep sp 16-byte aligned per
   AAPCS64 (only 8 are used).  Target in x8; result from x0. */
4461 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4462  arg7,arg8,arg9) \
4463  do { \
4464  volatile OrigFn _orig = (orig); \
4465  volatile unsigned long _argvec[10]; \
4466  volatile unsigned long _res; \
4467  _argvec[0] = (unsigned long)_orig.nraddr; \
4468  _argvec[1] = (unsigned long)(arg1); \
4469  _argvec[2] = (unsigned long)(arg2); \
4470  _argvec[3] = (unsigned long)(arg3); \
4471  _argvec[4] = (unsigned long)(arg4); \
4472  _argvec[5] = (unsigned long)(arg5); \
4473  _argvec[6] = (unsigned long)(arg6); \
4474  _argvec[7] = (unsigned long)(arg7); \
4475  _argvec[8] = (unsigned long)(arg8); \
4476  _argvec[9] = (unsigned long)(arg9); \
4477  __asm__ volatile( \
4478  VALGRIND_ALIGN_STACK \
4479  "sub sp, sp, #0x20 \n\t" \
4480  "ldr x0, [%1, #8] \n\t" \
4481  "ldr x1, [%1, #16] \n\t" \
4482  "ldr x2, [%1, #24] \n\t" \
4483  "ldr x3, [%1, #32] \n\t" \
4484  "ldr x4, [%1, #40] \n\t" \
4485  "ldr x5, [%1, #48] \n\t" \
4486  "ldr x6, [%1, #56] \n\t" \
4487  "ldr x7, [%1, #64] \n\t" \
4488  "ldr x8, [%1, #72] \n\t" \
4489  "str x8, [sp, #0] \n\t" \
4490  "ldr x8, [%1] \n\t" /* target->x8 */ \
4491  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4492  VALGRIND_RESTORE_STACK \
4493  "mov %0, x0" \
4494  : /*out*/ "=r" (_res) \
4495  : /*in*/ "0" (&_argvec[0]) \
4496  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4497  ); \
4498  lval = (__typeof__(lval)) _res; \
4499  } while (0)
4500 
/* arm64-linux: hidden call to a 10-argument function.
   Args 1-8 in x0-x7; args 9 and 10 spilled to [sp, #0] and [sp, #8]
   (x8 used as a scratch register for the spills, then loaded with the
   target address).  Result from x0. */
4501 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4502  arg7,arg8,arg9,arg10) \
4503  do { \
4504  volatile OrigFn _orig = (orig); \
4505  volatile unsigned long _argvec[11]; \
4506  volatile unsigned long _res; \
4507  _argvec[0] = (unsigned long)_orig.nraddr; \
4508  _argvec[1] = (unsigned long)(arg1); \
4509  _argvec[2] = (unsigned long)(arg2); \
4510  _argvec[3] = (unsigned long)(arg3); \
4511  _argvec[4] = (unsigned long)(arg4); \
4512  _argvec[5] = (unsigned long)(arg5); \
4513  _argvec[6] = (unsigned long)(arg6); \
4514  _argvec[7] = (unsigned long)(arg7); \
4515  _argvec[8] = (unsigned long)(arg8); \
4516  _argvec[9] = (unsigned long)(arg9); \
4517  _argvec[10] = (unsigned long)(arg10); \
4518  __asm__ volatile( \
4519  VALGRIND_ALIGN_STACK \
4520  "sub sp, sp, #0x20 \n\t" \
4521  "ldr x0, [%1, #8] \n\t" \
4522  "ldr x1, [%1, #16] \n\t" \
4523  "ldr x2, [%1, #24] \n\t" \
4524  "ldr x3, [%1, #32] \n\t" \
4525  "ldr x4, [%1, #40] \n\t" \
4526  "ldr x5, [%1, #48] \n\t" \
4527  "ldr x6, [%1, #56] \n\t" \
4528  "ldr x7, [%1, #64] \n\t" \
4529  "ldr x8, [%1, #72] \n\t" \
4530  "str x8, [sp, #0] \n\t" \
4531  "ldr x8, [%1, #80] \n\t" \
4532  "str x8, [sp, #8] \n\t" \
4533  "ldr x8, [%1] \n\t" /* target->x8 */ \
4534  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4535  VALGRIND_RESTORE_STACK \
4536  "mov %0, x0" \
4537  : /*out*/ "=r" (_res) \
4538  : /*in*/ "0" (&_argvec[0]) \
4539  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4540  ); \
4541  lval = (__typeof__(lval)) _res; \
4542  } while (0)
4543 
/* arm64-linux: hidden call to an 11-argument function.
   Args 1-8 in x0-x7; args 9-11 spilled to [sp, #0/#8/#16].  0x30 bytes
   reserved (next 16-byte multiple above the 24 needed).  Result from x0. */
4544 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4545  arg7,arg8,arg9,arg10,arg11) \
4546  do { \
4547  volatile OrigFn _orig = (orig); \
4548  volatile unsigned long _argvec[12]; \
4549  volatile unsigned long _res; \
4550  _argvec[0] = (unsigned long)_orig.nraddr; \
4551  _argvec[1] = (unsigned long)(arg1); \
4552  _argvec[2] = (unsigned long)(arg2); \
4553  _argvec[3] = (unsigned long)(arg3); \
4554  _argvec[4] = (unsigned long)(arg4); \
4555  _argvec[5] = (unsigned long)(arg5); \
4556  _argvec[6] = (unsigned long)(arg6); \
4557  _argvec[7] = (unsigned long)(arg7); \
4558  _argvec[8] = (unsigned long)(arg8); \
4559  _argvec[9] = (unsigned long)(arg9); \
4560  _argvec[10] = (unsigned long)(arg10); \
4561  _argvec[11] = (unsigned long)(arg11); \
4562  __asm__ volatile( \
4563  VALGRIND_ALIGN_STACK \
4564  "sub sp, sp, #0x30 \n\t" \
4565  "ldr x0, [%1, #8] \n\t" \
4566  "ldr x1, [%1, #16] \n\t" \
4567  "ldr x2, [%1, #24] \n\t" \
4568  "ldr x3, [%1, #32] \n\t" \
4569  "ldr x4, [%1, #40] \n\t" \
4570  "ldr x5, [%1, #48] \n\t" \
4571  "ldr x6, [%1, #56] \n\t" \
4572  "ldr x7, [%1, #64] \n\t" \
4573  "ldr x8, [%1, #72] \n\t" \
4574  "str x8, [sp, #0] \n\t" \
4575  "ldr x8, [%1, #80] \n\t" \
4576  "str x8, [sp, #8] \n\t" \
4577  "ldr x8, [%1, #88] \n\t" \
4578  "str x8, [sp, #16] \n\t" \
4579  "ldr x8, [%1] \n\t" /* target->x8 */ \
4580  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4581  VALGRIND_RESTORE_STACK \
4582  "mov %0, x0" \
4583  : /*out*/ "=r" (_res) \
4584  : /*in*/ "0" (&_argvec[0]) \
4585  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4586  ); \
4587  lval = (__typeof__(lval)) _res; \
4588  } while (0)
4589 
/* arm64-linux: hidden call to a 12-argument function.
   Args 1-8 in x0-x7; args 9-12 spilled to [sp, #0/#8/#16/#24], filling
   the reserved 0x30-byte area exactly.  Result from x0; x21 trashed. */
4590 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4591  arg7,arg8,arg9,arg10,arg11, \
4592  arg12) \
4593  do { \
4594  volatile OrigFn _orig = (orig); \
4595  volatile unsigned long _argvec[13]; \
4596  volatile unsigned long _res; \
4597  _argvec[0] = (unsigned long)_orig.nraddr; \
4598  _argvec[1] = (unsigned long)(arg1); \
4599  _argvec[2] = (unsigned long)(arg2); \
4600  _argvec[3] = (unsigned long)(arg3); \
4601  _argvec[4] = (unsigned long)(arg4); \
4602  _argvec[5] = (unsigned long)(arg5); \
4603  _argvec[6] = (unsigned long)(arg6); \
4604  _argvec[7] = (unsigned long)(arg7); \
4605  _argvec[8] = (unsigned long)(arg8); \
4606  _argvec[9] = (unsigned long)(arg9); \
4607  _argvec[10] = (unsigned long)(arg10); \
4608  _argvec[11] = (unsigned long)(arg11); \
4609  _argvec[12] = (unsigned long)(arg12); \
4610  __asm__ volatile( \
4611  VALGRIND_ALIGN_STACK \
4612  "sub sp, sp, #0x30 \n\t" \
4613  "ldr x0, [%1, #8] \n\t" \
4614  "ldr x1, [%1, #16] \n\t" \
4615  "ldr x2, [%1, #24] \n\t" \
4616  "ldr x3, [%1, #32] \n\t" \
4617  "ldr x4, [%1, #40] \n\t" \
4618  "ldr x5, [%1, #48] \n\t" \
4619  "ldr x6, [%1, #56] \n\t" \
4620  "ldr x7, [%1, #64] \n\t" \
4621  "ldr x8, [%1, #72] \n\t" \
4622  "str x8, [sp, #0] \n\t" \
4623  "ldr x8, [%1, #80] \n\t" \
4624  "str x8, [sp, #8] \n\t" \
4625  "ldr x8, [%1, #88] \n\t" \
4626  "str x8, [sp, #16] \n\t" \
4627  "ldr x8, [%1, #96] \n\t" \
4628  "str x8, [sp, #24] \n\t" \
4629  "ldr x8, [%1] \n\t" /* target->x8 */ \
4630  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4631  VALGRIND_RESTORE_STACK \
4632  "mov %0, x0" \
4633  : /*out*/ "=r" (_res) \
4634  : /*in*/ "0" (&_argvec[0]) \
4635  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4636  ); \
4637  lval = (__typeof__(lval)) _res; \
4638  } while (0)
4639 
4640 #endif /* PLAT_arm64_linux */
4641 
4642 /* ------------------------- s390x-linux ------------------------- */
4643 
4644 #if defined(PLAT_s390x_linux)
4645 
4646 /* Similar workaround as amd64 (see above), but we use r11 as frame
4647  pointer and save the old r11 in r7. r11 might be used for
4648  argvec, therefore we copy argvec in r1 since r1 is clobbered
4649  after the call anyway. */
/* s390x: when the compiler emits DWARF CFI in asm, wrap the hidden call
   in CFI directives so stack unwinding stays correct.  The prologue
   copies the argvec pointer into r1 (r11 may hold it and is repurposed
   as frame pointer), saves the old r11 in r7, and points r11 at the
   caller's CFA; the epilogue restores r11 and the saved CFI state.
   Without CFI support only the r1 copy is done. */
4650 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
4651 # define __FRAME_POINTER \
4652  ,"d"(__builtin_dwarf_cfa())
4653 # define VALGRIND_CFI_PROLOGUE \
4654  ".cfi_remember_state\n\t" \
4655  "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
4656  "lgr 7,11\n\t" \
4657  "lgr 11,%2\n\t" \
4658  ".cfi_def_cfa r11, 0\n\t"
4659 # define VALGRIND_CFI_EPILOGUE \
4660  "lgr 11, 7\n\t" \
4661  ".cfi_restore_state\n\t"
4662 #else
4663 # define __FRAME_POINTER
4664 # define VALGRIND_CFI_PROLOGUE \
4665  "lgr 1,%1\n\t"
4666 # define VALGRIND_CFI_EPILOGUE
4667 #endif
4668 
4669 /* Nb: On s390 the stack pointer is properly aligned *at all times*
4670  according to the s390 GCC maintainer. (The ABI specification is not
4671  precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4672  VALGRIND_RESTORE_STACK are not defined here. */
4673 
4674 /* These regs are trashed by the hidden call. Note that we overwrite
4675  r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4676  function a proper return address. All others are ABI defined call
4677  clobbers. */
/* GPRs r0-r5, the return-address register r14, and the call-clobbered
   FPRs f0-f7 (see the explanatory comment above). */
4678 #define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
4679  "f0","f1","f2","f3","f4","f5","f6","f7"
4680 
4681 /* Nb: Although r11 is modified in the asm snippets below (inside
4682  VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
4683  two reasons:
4684  (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4685  modified
4686  (2) GCC will complain that r11 cannot appear inside a clobber section,
4687  when compiled with -O -fno-omit-frame-pointer
4688  */
4689 
/* s390x-linux: hidden call to a 0-argument function.
   Reserves the 160-byte register save area (per the s390x ELF ABI),
   loads the target address into r1 for VALGRIND_CALL_NOREDIR_R1 and
   copies the result from r2.  r7 is trashed by the CFI prologue. */
4690 #define CALL_FN_W_v(lval, orig) \
4691  do { \
4692  volatile OrigFn _orig = (orig); \
4693  volatile unsigned long _argvec[1]; \
4694  volatile unsigned long _res; \
4695  _argvec[0] = (unsigned long)_orig.nraddr; \
4696  __asm__ volatile( \
4697  VALGRIND_CFI_PROLOGUE \
4698  "aghi 15,-160\n\t" \
4699  "lg 1, 0(1)\n\t" /* target->r1 */ \
4700  VALGRIND_CALL_NOREDIR_R1 \
4701  "lgr %0, 2\n\t" \
4702  "aghi 15,160\n\t" \
4703  VALGRIND_CFI_EPILOGUE \
4704  : /*out*/ "=d" (_res) \
4705  : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
4706  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4707  ); \
4708  lval = (__typeof__(lval)) _res; \
4709  } while (0)
4710 
4711 /* The call abi has the arguments in r2-r6 and stack */
/* s390x-linux: hidden call to a 1-argument function.
   arg1 goes in r2 (args live in r2-r6 per the comment above); target
   in r1; result from r2. */
4712 #define CALL_FN_W_W(lval, orig, arg1) \
4713  do { \
4714  volatile OrigFn _orig = (orig); \
4715  volatile unsigned long _argvec[2]; \
4716  volatile unsigned long _res; \
4717  _argvec[0] = (unsigned long)_orig.nraddr; \
4718  _argvec[1] = (unsigned long)arg1; \
4719  __asm__ volatile( \
4720  VALGRIND_CFI_PROLOGUE \
4721  "aghi 15,-160\n\t" \
4722  "lg 2, 8(1)\n\t" \
4723  "lg 1, 0(1)\n\t" \
4724  VALGRIND_CALL_NOREDIR_R1 \
4725  "lgr %0, 2\n\t" \
4726  "aghi 15,160\n\t" \
4727  VALGRIND_CFI_EPILOGUE \
4728  : /*out*/ "=d" (_res) \
4729  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4730  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4731  ); \
4732  lval = (__typeof__(lval)) _res; \
4733  } while (0)
4734 
/* s390x-linux: hidden call to a 2-argument function (args in r2-r3). */
4735 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
4736  do { \
4737  volatile OrigFn _orig = (orig); \
4738  volatile unsigned long _argvec[3]; \
4739  volatile unsigned long _res; \
4740  _argvec[0] = (unsigned long)_orig.nraddr; \
4741  _argvec[1] = (unsigned long)arg1; \
4742  _argvec[2] = (unsigned long)arg2; \
4743  __asm__ volatile( \
4744  VALGRIND_CFI_PROLOGUE \
4745  "aghi 15,-160\n\t" \
4746  "lg 2, 8(1)\n\t" \
4747  "lg 3,16(1)\n\t" \
4748  "lg 1, 0(1)\n\t" \
4749  VALGRIND_CALL_NOREDIR_R1 \
4750  "lgr %0, 2\n\t" \
4751  "aghi 15,160\n\t" \
4752  VALGRIND_CFI_EPILOGUE \
4753  : /*out*/ "=d" (_res) \
4754  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4755  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4756  ); \
4757  lval = (__typeof__(lval)) _res; \
4758  } while (0)
4759 
/* s390x-linux: hidden call to a 3-argument function (args in r2-r4). */
4760 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
4761  do { \
4762  volatile OrigFn _orig = (orig); \
4763  volatile unsigned long _argvec[4]; \
4764  volatile unsigned long _res; \
4765  _argvec[0] = (unsigned long)_orig.nraddr; \
4766  _argvec[1] = (unsigned long)arg1; \
4767  _argvec[2] = (unsigned long)arg2; \
4768  _argvec[3] = (unsigned long)arg3; \
4769  __asm__ volatile( \
4770  VALGRIND_CFI_PROLOGUE \
4771  "aghi 15,-160\n\t" \
4772  "lg 2, 8(1)\n\t" \
4773  "lg 3,16(1)\n\t" \
4774  "lg 4,24(1)\n\t" \
4775  "lg 1, 0(1)\n\t" \
4776  VALGRIND_CALL_NOREDIR_R1 \
4777  "lgr %0, 2\n\t" \
4778  "aghi 15,160\n\t" \
4779  VALGRIND_CFI_EPILOGUE \
4780  : /*out*/ "=d" (_res) \
4781  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4782  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4783  ); \
4784  lval = (__typeof__(lval)) _res; \
4785  } while (0)
4786 
/* s390x-linux: hidden call to a 4-argument function (args in r2-r5). */
4787 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
4788  do { \
4789  volatile OrigFn _orig = (orig); \
4790  volatile unsigned long _argvec[5]; \
4791  volatile unsigned long _res; \
4792  _argvec[0] = (unsigned long)_orig.nraddr; \
4793  _argvec[1] = (unsigned long)arg1; \
4794  _argvec[2] = (unsigned long)arg2; \
4795  _argvec[3] = (unsigned long)arg3; \
4796  _argvec[4] = (unsigned long)arg4; \
4797  __asm__ volatile( \
4798  VALGRIND_CFI_PROLOGUE \
4799  "aghi 15,-160\n\t" \
4800  "lg 2, 8(1)\n\t" \
4801  "lg 3,16(1)\n\t" \
4802  "lg 4,24(1)\n\t" \
4803  "lg 5,32(1)\n\t" \
4804  "lg 1, 0(1)\n\t" \
4805  VALGRIND_CALL_NOREDIR_R1 \
4806  "lgr %0, 2\n\t" \
4807  "aghi 15,160\n\t" \
4808  VALGRIND_CFI_EPILOGUE \
4809  : /*out*/ "=d" (_res) \
4810  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4811  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4812  ); \
4813  lval = (__typeof__(lval)) _res; \
4814  } while (0)
4815 
/* s390x-linux: hidden call to a 5-argument function (args in r2-r6).
   r6 is call-saved in the s390x ABI but used for arg5 here, hence the
   extra "6" in the clobber list. */
4816 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
4817  do { \
4818  volatile OrigFn _orig = (orig); \
4819  volatile unsigned long _argvec[6]; \
4820  volatile unsigned long _res; \
4821  _argvec[0] = (unsigned long)_orig.nraddr; \
4822  _argvec[1] = (unsigned long)arg1; \
4823  _argvec[2] = (unsigned long)arg2; \
4824  _argvec[3] = (unsigned long)arg3; \
4825  _argvec[4] = (unsigned long)arg4; \
4826  _argvec[5] = (unsigned long)arg5; \
4827  __asm__ volatile( \
4828  VALGRIND_CFI_PROLOGUE \
4829  "aghi 15,-160\n\t" \
4830  "lg 2, 8(1)\n\t" \
4831  "lg 3,16(1)\n\t" \
4832  "lg 4,24(1)\n\t" \
4833  "lg 5,32(1)\n\t" \
4834  "lg 6,40(1)\n\t" \
4835  "lg 1, 0(1)\n\t" \
4836  VALGRIND_CALL_NOREDIR_R1 \
4837  "lgr %0, 2\n\t" \
4838  "aghi 15,160\n\t" \
4839  VALGRIND_CFI_EPILOGUE \
4840  : /*out*/ "=d" (_res) \
4841  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4842  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4843  ); \
4844  lval = (__typeof__(lval)) _res; \
4845  } while (0)
4846 
/* s390x-linux: hidden call to a 6-argument function.
   Args 1-5 in r2-r6; arg6 is copied (mvc) to the stack parameter slot
   at 160(r15), so 168 bytes are reserved instead of the base 160. */
4847 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4848  arg6) \
4849  do { \
4850  volatile OrigFn _orig = (orig); \
4851  volatile unsigned long _argvec[7]; \
4852  volatile unsigned long _res; \
4853  _argvec[0] = (unsigned long)_orig.nraddr; \
4854  _argvec[1] = (unsigned long)arg1; \
4855  _argvec[2] = (unsigned long)arg2; \
4856  _argvec[3] = (unsigned long)arg3; \
4857  _argvec[4] = (unsigned long)arg4; \
4858  _argvec[5] = (unsigned long)arg5; \
4859  _argvec[6] = (unsigned long)arg6; \
4860  __asm__ volatile( \
4861  VALGRIND_CFI_PROLOGUE \
4862  "aghi 15,-168\n\t" \
4863  "lg 2, 8(1)\n\t" \
4864  "lg 3,16(1)\n\t" \
4865  "lg 4,24(1)\n\t" \
4866  "lg 5,32(1)\n\t" \
4867  "lg 6,40(1)\n\t" \
4868  "mvc 160(8,15), 48(1)\n\t" \
4869  "lg 1, 0(1)\n\t" \
4870  VALGRIND_CALL_NOREDIR_R1 \
4871  "lgr %0, 2\n\t" \
4872  "aghi 15,168\n\t" \
4873  VALGRIND_CFI_EPILOGUE \
4874  : /*out*/ "=d" (_res) \
4875  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4876  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4877  ); \
4878  lval = (__typeof__(lval)) _res; \
4879  } while (0)
4880 
/* s390x-linux: hidden call to a 7-argument function.
   Args 1-5 in r2-r6; args 6-7 copied to stack slots 160/168(r15);
   176 bytes reserved. */
4881 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4882  arg6, arg7) \
4883  do { \
4884  volatile OrigFn _orig = (orig); \
4885  volatile unsigned long _argvec[8]; \
4886  volatile unsigned long _res; \
4887  _argvec[0] = (unsigned long)_orig.nraddr; \
4888  _argvec[1] = (unsigned long)arg1; \
4889  _argvec[2] = (unsigned long)arg2; \
4890  _argvec[3] = (unsigned long)arg3; \
4891  _argvec[4] = (unsigned long)arg4; \
4892  _argvec[5] = (unsigned long)arg5; \
4893  _argvec[6] = (unsigned long)arg6; \
4894  _argvec[7] = (unsigned long)arg7; \
4895  __asm__ volatile( \
4896  VALGRIND_CFI_PROLOGUE \
4897  "aghi 15,-176\n\t" \
4898  "lg 2, 8(1)\n\t" \
4899  "lg 3,16(1)\n\t" \
4900  "lg 4,24(1)\n\t" \
4901  "lg 5,32(1)\n\t" \
4902  "lg 6,40(1)\n\t" \
4903  "mvc 160(8,15), 48(1)\n\t" \
4904  "mvc 168(8,15), 56(1)\n\t" \
4905  "lg 1, 0(1)\n\t" \
4906  VALGRIND_CALL_NOREDIR_R1 \
4907  "lgr %0, 2\n\t" \
4908  "aghi 15,176\n\t" \
4909  VALGRIND_CFI_EPILOGUE \
4910  : /*out*/ "=d" (_res) \
4911  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4912  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4913  ); \
4914  lval = (__typeof__(lval)) _res; \
4915  } while (0)
4916 
/* s390x-linux: hidden call to an 8-argument function.
   Args 1-5 in r2-r6; args 6-8 copied to stack slots 160..176(r15);
   184 bytes reserved. */
4917 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4918  arg6, arg7 ,arg8) \
4919  do { \
4920  volatile OrigFn _orig = (orig); \
4921  volatile unsigned long _argvec[9]; \
4922  volatile unsigned long _res; \
4923  _argvec[0] = (unsigned long)_orig.nraddr; \
4924  _argvec[1] = (unsigned long)arg1; \
4925  _argvec[2] = (unsigned long)arg2; \
4926  _argvec[3] = (unsigned long)arg3; \
4927  _argvec[4] = (unsigned long)arg4; \
4928  _argvec[5] = (unsigned long)arg5; \
4929  _argvec[6] = (unsigned long)arg6; \
4930  _argvec[7] = (unsigned long)arg7; \
4931  _argvec[8] = (unsigned long)arg8; \
4932  __asm__ volatile( \
4933  VALGRIND_CFI_PROLOGUE \
4934  "aghi 15,-184\n\t" \
4935  "lg 2, 8(1)\n\t" \
4936  "lg 3,16(1)\n\t" \
4937  "lg 4,24(1)\n\t" \
4938  "lg 5,32(1)\n\t" \
4939  "lg 6,40(1)\n\t" \
4940  "mvc 160(8,15), 48(1)\n\t" \
4941  "mvc 168(8,15), 56(1)\n\t" \
4942  "mvc 176(8,15), 64(1)\n\t" \
4943  "lg 1, 0(1)\n\t" \
4944  VALGRIND_CALL_NOREDIR_R1 \
4945  "lgr %0, 2\n\t" \
4946  "aghi 15,184\n\t" \
4947  VALGRIND_CFI_EPILOGUE \
4948  : /*out*/ "=d" (_res) \
4949  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4950  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4951  ); \
4952  lval = (__typeof__(lval)) _res; \
4953  } while (0)
4954 
/* s390x-linux: hidden call to a 9-argument function.
   Args 1-5 in r2-r6; args 6-9 copied to stack slots 160..184(r15);
   192 bytes reserved. */
4955 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4956  arg6, arg7 ,arg8, arg9) \
4957  do { \
4958  volatile OrigFn _orig = (orig); \
4959  volatile unsigned long _argvec[10]; \
4960  volatile unsigned long _res; \
4961  _argvec[0] = (unsigned long)_orig.nraddr; \
4962  _argvec[1] = (unsigned long)arg1; \
4963  _argvec[2] = (unsigned long)arg2; \
4964  _argvec[3] = (unsigned long)arg3; \
4965  _argvec[4] = (unsigned long)arg4; \
4966  _argvec[5] = (unsigned long)arg5; \
4967  _argvec[6] = (unsigned long)arg6; \
4968  _argvec[7] = (unsigned long)arg7; \
4969  _argvec[8] = (unsigned long)arg8; \
4970  _argvec[9] = (unsigned long)arg9; \
4971  __asm__ volatile( \
4972  VALGRIND_CFI_PROLOGUE \
4973  "aghi 15,-192\n\t" \
4974  "lg 2, 8(1)\n\t" \
4975  "lg 3,16(1)\n\t" \
4976  "lg 4,24(1)\n\t" \
4977  "lg 5,32(1)\n\t" \
4978  "lg 6,40(1)\n\t" \
4979  "mvc 160(8,15), 48(1)\n\t" \
4980  "mvc 168(8,15), 56(1)\n\t" \
4981  "mvc 176(8,15), 64(1)\n\t" \
4982  "mvc 184(8,15), 72(1)\n\t" \
4983  "lg 1, 0(1)\n\t" \
4984  VALGRIND_CALL_NOREDIR_R1 \
4985  "lgr %0, 2\n\t" \
4986  "aghi 15,192\n\t" \
4987  VALGRIND_CFI_EPILOGUE \
4988  : /*out*/ "=d" (_res) \
4989  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4990  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4991  ); \
4992  lval = (__typeof__(lval)) _res; \
4993  } while (0)
4994 
/* s390x-linux: hidden call to a 10-argument function.
   Args 1-5 in r2-r6; args 6-10 copied to stack slots 160..192(r15);
   200 bytes reserved. */
4995 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4996  arg6, arg7 ,arg8, arg9, arg10) \
4997  do { \
4998  volatile OrigFn _orig = (orig); \
4999  volatile unsigned long _argvec[11]; \
5000  volatile unsigned long _res; \
5001  _argvec[0] = (unsigned long)_orig.nraddr; \
5002  _argvec[1] = (unsigned long)arg1; \
5003  _argvec[2] = (unsigned long)arg2; \
5004  _argvec[3] = (unsigned long)arg3; \
5005  _argvec[4] = (unsigned long)arg4; \
5006  _argvec[5] = (unsigned long)arg5; \
5007  _argvec[6] = (unsigned long)arg6; \
5008  _argvec[7] = (unsigned long)arg7; \
5009  _argvec[8] = (unsigned long)arg8; \
5010  _argvec[9] = (unsigned long)arg9; \
5011  _argvec[10] = (unsigned long)arg10; \
5012  __asm__ volatile( \
5013  VALGRIND_CFI_PROLOGUE \
5014  "aghi 15,-200\n\t" \
5015  "lg 2, 8(1)\n\t" \
5016  "lg 3,16(1)\n\t" \
5017  "lg 4,24(1)\n\t" \
5018  "lg 5,32(1)\n\t" \
5019  "lg 6,40(1)\n\t" \
5020  "mvc 160(8,15), 48(1)\n\t" \
5021  "mvc 168(8,15), 56(1)\n\t" \
5022  "mvc 176(8,15), 64(1)\n\t" \
5023  "mvc 184(8,15), 72(1)\n\t" \
5024  "mvc 192(8,15), 80(1)\n\t" \
5025  "lg 1, 0(1)\n\t" \
5026  VALGRIND_CALL_NOREDIR_R1 \
5027  "lgr %0, 2\n\t" \
5028  "aghi 15,200\n\t" \
5029  VALGRIND_CFI_EPILOGUE \
5030  : /*out*/ "=d" (_res) \
5031  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5032  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5033  ); \
5034  lval = (__typeof__(lval)) _res; \
5035  } while (0)
5036 
/* s390x-linux: hidden call to an 11-argument function.
   Args 1-5 in r2-r6; args 6-11 copied to stack slots 160..200(r15);
   208 bytes reserved. */
5037 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5038  arg6, arg7 ,arg8, arg9, arg10, arg11) \
5039  do { \
5040  volatile OrigFn _orig = (orig); \
5041  volatile unsigned long _argvec[12]; \
5042  volatile unsigned long _res; \
5043  _argvec[0] = (unsigned long)_orig.nraddr; \
5044  _argvec[1] = (unsigned long)arg1; \
5045  _argvec[2] = (unsigned long)arg2; \
5046  _argvec[3] = (unsigned long)arg3; \
5047  _argvec[4] = (unsigned long)arg4; \
5048  _argvec[5] = (unsigned long)arg5; \
5049  _argvec[6] = (unsigned long)arg6; \
5050  _argvec[7] = (unsigned long)arg7; \
5051  _argvec[8] = (unsigned long)arg8; \
5052  _argvec[9] = (unsigned long)arg9; \
5053  _argvec[10] = (unsigned long)arg10; \
5054  _argvec[11] = (unsigned long)arg11; \
5055  __asm__ volatile( \
5056  VALGRIND_CFI_PROLOGUE \
5057  "aghi 15,-208\n\t" \
5058  "lg 2, 8(1)\n\t" \
5059  "lg 3,16(1)\n\t" \
5060  "lg 4,24(1)\n\t" \
5061  "lg 5,32(1)\n\t" \
5062  "lg 6,40(1)\n\t" \
5063  "mvc 160(8,15), 48(1)\n\t" \
5064  "mvc 168(8,15), 56(1)\n\t" \
5065  "mvc 176(8,15), 64(1)\n\t" \
5066  "mvc 184(8,15), 72(1)\n\t" \
5067  "mvc 192(8,15), 80(1)\n\t" \
5068  "mvc 200(8,15), 88(1)\n\t" \
5069  "lg 1, 0(1)\n\t" \
5070  VALGRIND_CALL_NOREDIR_R1 \
5071  "lgr %0, 2\n\t" \
5072  "aghi 15,208\n\t" \
5073  VALGRIND_CFI_EPILOGUE \
5074  : /*out*/ "=d" (_res) \
5075  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5076  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5077  ); \
5078  lval = (__typeof__(lval)) _res; \
5079  } while (0)
5080 
/* s390x-linux: hidden call to a 12-argument function.
   Args 1-5 in r2-r6; args 6-12 copied to stack slots 160..208(r15);
   216 bytes reserved. */
5081 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5082  arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
5083  do { \
5084  volatile OrigFn _orig = (orig); \
5085  volatile unsigned long _argvec[13]; \
5086  volatile unsigned long _res; \
5087  _argvec[0] = (unsigned long)_orig.nraddr; \
5088  _argvec[1] = (unsigned long)arg1; \
5089  _argvec[2] = (unsigned long)arg2; \
5090  _argvec[3] = (unsigned long)arg3; \
5091  _argvec[4] = (unsigned long)arg4; \
5092  _argvec[5] = (unsigned long)arg5; \
5093  _argvec[6] = (unsigned long)arg6; \
5094  _argvec[7] = (unsigned long)arg7; \
5095  _argvec[8] = (unsigned long)arg8; \
5096  _argvec[9] = (unsigned long)arg9; \
5097  _argvec[10] = (unsigned long)arg10; \
5098  _argvec[11] = (unsigned long)arg11; \
5099  _argvec[12] = (unsigned long)arg12; \
5100  __asm__ volatile( \
5101  VALGRIND_CFI_PROLOGUE \
5102  "aghi 15,-216\n\t" \
5103  "lg 2, 8(1)\n\t" \
5104  "lg 3,16(1)\n\t" \
5105  "lg 4,24(1)\n\t" \
5106  "lg 5,32(1)\n\t" \
5107  "lg 6,40(1)\n\t" \
5108  "mvc 160(8,15), 48(1)\n\t" \
5109  "mvc 168(8,15), 56(1)\n\t" \
5110  "mvc 176(8,15), 64(1)\n\t" \
5111  "mvc 184(8,15), 72(1)\n\t" \
5112  "mvc 192(8,15), 80(1)\n\t" \
5113  "mvc 200(8,15), 88(1)\n\t" \
5114  "mvc 208(8,15), 96(1)\n\t" \
5115  "lg 1, 0(1)\n\t" \
5116  VALGRIND_CALL_NOREDIR_R1 \
5117  "lgr %0, 2\n\t" \
5118  "aghi 15,216\n\t" \
5119  VALGRIND_CFI_EPILOGUE \
5120  : /*out*/ "=d" (_res) \
5121  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5122  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5123  ); \
5124  lval = (__typeof__(lval)) _res; \
5125  } while (0)
5126 
5127 
5128 #endif /* PLAT_s390x_linux */
5129 
5130 /* ------------------------- mips32-linux ----------------------- */
5131 
5132 #if defined(PLAT_mips32_linux)
5133 
5134 /* These regs are trashed by the hidden call. */
/* $2-$15 and $24-$25 are the o32 call-clobbered GPRs ($v0-$v1, $a0-$a3,
   $t0-$t9); $31 is $ra, which the hidden call overwrites. */
5135 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5136 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
5137 "$25", "$31"
5138 
5139 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5140  long) == 4. */
5141 
/* mips32-linux: hidden call to a 0-argument function.
   Saves $28 ($gp) and $31 ($ra) on the stack, reserves the 16-byte
   outgoing-argument area, puts the target address in $25 ($t9) for
   VALGRIND_CALL_NOREDIR_T9, then restores and takes the result from $2
   ($v0). */
5142 #define CALL_FN_W_v(lval, orig) \
5143  do { \
5144  volatile OrigFn _orig = (orig); \
5145  volatile unsigned long _argvec[1]; \
5146  volatile unsigned long _res; \
5147  _argvec[0] = (unsigned long)_orig.nraddr; \
5148  __asm__ volatile( \
5149  "subu $29, $29, 8 \n\t" \
5150  "sw $28, 0($29) \n\t" \
5151  "sw $31, 4($29) \n\t" \
5152  "subu $29, $29, 16 \n\t" \
5153  "lw $25, 0(%1) \n\t" /* target->t9 */ \
5154  VALGRIND_CALL_NOREDIR_T9 \
5155  "addu $29, $29, 16\n\t" \
5156  "lw $28, 0($29) \n\t" \
5157  "lw $31, 4($29) \n\t" \
5158  "addu $29, $29, 8 \n\t" \
5159  "move %0, $2\n" \
5160  : /*out*/ "=r" (_res) \
5161  : /*in*/ "0" (&_argvec[0]) \
5162  : /*trash*/ "memory", __CALLER_SAVED_REGS \
5163  ); \
5164  lval = (__typeof__(lval)) _res; \
5165  } while (0)
5166 
/* mips32-linux: hidden call to a 1-argument function.
   arg1 goes in $4 ($a0); target in $25 ($t9); result from $2 ($v0). */
5167 #define CALL_FN_W_W(lval, orig, arg1) \
5168  do { \
5169  volatile OrigFn _orig = (orig); \
5170  volatile unsigned long _argvec[2]; \
5171  volatile unsigned long _res; \
5172  _argvec[0] = (unsigned long)_orig.nraddr; \
5173  _argvec[1] = (unsigned long)(arg1); \
5174  __asm__ volatile( \
5175  "subu $29, $29, 8 \n\t" \
5176  "sw $28, 0($29) \n\t" \
5177  "sw $31, 4($29) \n\t" \
5178  "subu $29, $29, 16 \n\t" \
5179  "lw $4, 4(%1) \n\t" /* arg1*/ \
5180  "lw $25, 0(%1) \n\t" /* target->t9 */ \
5181  VALGRIND_CALL_NOREDIR_T9 \
5182  "addu $29, $29, 16 \n\t" \
5183  "lw $28, 0($29) \n\t" \
5184  "lw $31, 4($29) \n\t" \
5185  "addu $29, $29, 8 \n\t" \
5186  "move %0, $2\n" \
5187  : /*out*/ "=r" (_res) \
5188  : /*in*/ "0" (&_argvec[0]) \
5189  : /*trash*/ "memory", __CALLER_SAVED_REGS \
5190  ); \
5191  lval = (__typeof__(lval)) _res; \
5192  } while (0)
5193 
/* mips32-linux: hidden call to a 2-argument function (args in $4-$5). */
5194 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5195  do { \
5196  volatile OrigFn _orig = (orig); \
5197  volatile unsigned long _argvec[3]; \
5198  volatile unsigned long _res; \
5199  _argvec[0] = (unsigned long)_orig.nraddr; \
5200  _argvec[1] = (unsigned long)(arg1); \
5201  _argvec[2] = (unsigned long)(arg2); \
5202  __asm__ volatile( \
5203  "subu $29, $29, 8 \n\t" \
5204  "sw $28, 0($29) \n\t" \
5205  "sw $31, 4($29) \n\t" \
5206  "subu $29, $29, 16 \n\t" \
5207  "lw $4, 4(%1) \n\t" \
5208  "lw $5, 8(%1) \n\t" \
5209  "lw $25, 0(%1) \n\t" /* target->t9 */ \
5210  VALGRIND_CALL_NOREDIR_T9 \
5211  "addu $29, $29, 16 \n\t" \
5212  "lw $28, 0($29) \n\t" \
5213  "lw $31, 4($29) \n\t" \
5214  "addu $29, $29, 8 \n\t" \
5215  "move %0, $2\n" \
5216  : /*out*/ "=r" (_res) \
5217  : /*in*/ "0" (&_argvec[0]) \
5218  : /*trash*/ "memory", __CALLER_SAVED_REGS \
5219  ); \
5220  lval = (__typeof__(lval)) _res; \
5221  } while (0)
5222 
/* mips32-linux: hidden call to a 3-argument function (args in $4-$6). */
5223 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5224  do { \
5225  volatile OrigFn _orig = (orig); \
5226  volatile unsigned long _argvec[4]; \
5227  volatile unsigned long _res; \
5228  _argvec[0] = (unsigned long)_orig.nraddr; \
5229  _argvec[1] = (unsigned long)(arg1); \
5230  _argvec[2] = (unsigned long)(arg2); \
5231  _argvec[3] = (unsigned long)(arg3); \
5232  __asm__ volatile( \
5233  "subu $29, $29, 8 \n\t" \
5234  "sw $28, 0($29) \n\t" \
5235  "sw $31, 4($29) \n\t" \
5236  "subu $29, $29, 16 \n\t" \
5237  "lw $4, 4(%1) \n\t" \
5238  "lw $5, 8(%1) \n\t" \
5239  "lw $6, 12(%1) \n\t" \
5240  "lw $25, 0(%1) \n\t" /* target->t9 */ \
5241  VALGRIND_CALL_NOREDIR_T9 \
5242  "addu $29, $29, 16 \n\t" \
5243  "lw $28, 0($29) \n\t" \
5244  "lw $31, 4($29) \n\t" \
5245  "addu $29, $29, 8 \n\t" \
5246  "move %0, $2\n" \
5247  : /*out*/ "=r" (_res) \
5248  : /*in*/ "0" (&_argvec[0]) \
5249  : /*trash*/ "memory", __CALLER_SAVED_REGS \
5250  ); \
5251  lval = (__typeof__(lval)) _res; \
5252  } while (0)
5253 
/* mips32-linux: hidden call to a 4-argument function (args in $4-$7,
   i.e. $a0-$a3). */
5254 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5255  do { \
5256  volatile OrigFn _orig = (orig); \
5257  volatile unsigned long _argvec[5]; \
5258  volatile unsigned long _res; \
5259  _argvec[0] = (unsigned long)_orig.nraddr; \
5260  _argvec[1] = (unsigned long)(arg1); \
5261  _argvec[2] = (unsigned long)(arg2); \
5262  _argvec[3] = (unsigned long)(arg3); \
5263  _argvec[4] = (unsigned long)(arg4); \
5264  __asm__ volatile( \
5265  "subu $29, $29, 8 \n\t" \
5266  "sw $28, 0($29) \n\t" \
5267  "sw $31, 4($29) \n\t" \
5268  "subu $29, $29, 16 \n\t" \
5269  "lw $4, 4(%1) \n\t" \
5270  "lw $5, 8(%1) \n\t" \
5271  "lw $6, 12(%1) \n\t" \
5272  "lw $7, 16(%1) \n\t" \
5273  "lw $25, 0(%1) \n\t" /* target->t9 */ \
5274  VALGRIND_CALL_NOREDIR_T9 \
5275  "addu $29, $29, 16 \n\t" \
5276  "lw $28, 0($29) \n\t" \
5277  "lw $31, 4($29) \n\t" \
5278  "addu $29, $29, 8 \n\t" \
5279  "move %0, $2\n" \
5280  : /*out*/ "=r" (_res) \
5281  : /*in*/ "0" (&_argvec[0]) \
5282  : /*trash*/ "memory", __CALLER_SAVED_REGS \
5283  ); \
5284  lval = (__typeof__(lval)) _res; \
5285  } while (0)
5286 
/* mips32-linux: hidden call to a 5-argument function.
   Args 1-4 in $4-$7; arg5 is staged through $4 and stored to the o32
   stack slot at 16($sp) before $4 is reloaded with arg1 (hence arg5 is
   read first).  24 bytes reserved: 16-byte arg area plus the arg5 slot. */
5287 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5288  do { \
5289  volatile OrigFn _orig = (orig); \
5290  volatile unsigned long _argvec[6]; \
5291  volatile unsigned long _res; \
5292  _argvec[0] = (unsigned long)_orig.nraddr; \
5293  _argvec[1] = (unsigned long)(arg1); \
5294  _argvec[2] = (unsigned long)(arg2); \
5295  _argvec[3] = (unsigned long)(arg3); \
5296  _argvec[4] = (unsigned long)(arg4); \
5297  _argvec[5] = (unsigned long)(arg5); \
5298  __asm__ volatile( \
5299  "subu $29, $29, 8 \n\t" \
5300  "sw $28, 0($29) \n\t" \
5301  "sw $31, 4($29) \n\t" \
5302  "lw $4, 20(%1) \n\t" \
5303  "subu $29, $29, 24\n\t" \
5304  "sw $4, 16($29) \n\t" \
5305  "lw $4, 4(%1) \n\t" \
5306  "lw $5, 8(%1) \n\t" \
5307  "lw $6, 12(%1) \n\t" \
5308  "lw $7, 16(%1) \n\t" \
5309  "lw $25, 0(%1) \n\t" /* target->t9 */ \
5310  VALGRIND_CALL_NOREDIR_T9 \
5311  "addu $29, $29, 24 \n\t" \
5312  "lw $28, 0($29) \n\t" \
5313  "lw $31, 4($29) \n\t" \
5314  "addu $29, $29, 8 \n\t" \
5315  "move %0, $2\n" \
5316  : /*out*/ "=r" (_res) \
5317  : /*in*/ "0" (&_argvec[0]) \
5318  : /*trash*/ "memory", __CALLER_SAVED_REGS \
5319  ); \
5320  lval = (__typeof__(lval)) _res; \
5321  } while (0)
/* mips32-linux: hidden call to a 6-argument function.
   Args 1-4 in $4-$7; args 5-6 staged through $4 into stack slots
   16($sp) and 20($sp).  32 bytes reserved. */
5322 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5323  do { \
5324  volatile OrigFn _orig = (orig); \
5325  volatile unsigned long _argvec[7]; \
5326  volatile unsigned long _res; \
5327  _argvec[0] = (unsigned long)_orig.nraddr; \
5328  _argvec[1] = (unsigned long)(arg1); \
5329  _argvec[2] = (unsigned long)(arg2); \
5330  _argvec[3] = (unsigned long)(arg3); \
5331  _argvec[4] = (unsigned long)(arg4); \
5332  _argvec[5] = (unsigned long)(arg5); \
5333  _argvec[6] = (unsigned long)(arg6); \
5334  __asm__ volatile( \
5335  "subu $29, $29, 8 \n\t" \
5336  "sw $28, 0($29) \n\t" \
5337  "sw $31, 4($29) \n\t" \
5338  "lw $4, 20(%1) \n\t" \
5339  "subu $29, $29, 32\n\t" \
5340  "sw $4, 16($29) \n\t" \
5341  "lw $4, 24(%1) \n\t" \
5342  "nop\n\t" \
5343  "sw $4, 20($29) \n\t" \
5344  "lw $4, 4(%1) \n\t" \
5345  "lw $5, 8(%1) \n\t" \
5346  "lw $6, 12(%1) \n\t" \
5347  "lw $7, 16(%1) \n\t" \
5348  "lw $25, 0(%1) \n\t" /* target->t9 */ \
5349  VALGRIND_CALL_NOREDIR_T9 \
5350  "addu $29, $29, 32 \n\t" \
5351  "lw $28, 0($29) \n\t" \
5352  "lw $31, 4($29) \n\t" \
5353  "addu $29, $29, 8 \n\t" \
5354  "move %0, $2\n" \
5355  : /*out*/ "=r" (_res) \
5356  : /*in*/ "0" (&_argvec[0]) \
5357  : /*trash*/ "memory", __CALLER_SAVED_REGS \
5358  ); \
5359  lval = (__typeof__(lval)) _res; \
5360  } while (0)
5361 
/* mips32-linux: hidden call to a 7-argument function.
   Args 1-4 in $4-$7; args 5-7 staged through $4 into stack slots
   16/20/24($sp).  32 bytes reserved. */
5362 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5363  arg7) \
5364  do { \
5365  volatile OrigFn _orig = (orig); \
5366  volatile unsigned long _argvec[8]; \
5367  volatile unsigned long _res; \
5368  _argvec[0] = (unsigned long)_orig.nraddr; \
5369  _argvec[1] = (unsigned long)(arg1); \
5370  _argvec[2] = (unsigned long)(arg2); \
5371  _argvec[3] = (unsigned long)(arg3); \
5372  _argvec[4] = (unsigned long)(arg4); \
5373  _argvec[5] = (unsigned long)(arg5); \
5374  _argvec[6] = (unsigned long)(arg6); \
5375  _argvec[7] = (unsigned long)(arg7); \
5376  __asm__ volatile( \
5377  "subu $29, $29, 8 \n\t" \
5378  "sw $28, 0($29) \n\t" \
5379  "sw $31, 4($29) \n\t" \
5380  "lw $4, 20(%1) \n\t" \
5381  "subu $29, $29, 32\n\t" \
5382  "sw $4, 16($29) \n\t" \
5383  "lw $4, 24(%1) \n\t" \
5384  "sw $4, 20($29) \n\t" \
5385  "lw $4, 28(%1) \n\t" \
5386  "sw $4, 24($29) \n\t" \
5387  "lw $4, 4(%1) \n\t" \
5388  "lw $5, 8(%1) \n\t" \
5389  "lw $6, 12(%1) \n\t" \
5390  "lw $7, 16(%1) \n\t" \
5391  "lw $25, 0(%1) \n\t" /* target->t9 */ \
5392  VALGRIND_CALL_NOREDIR_T9 \
5393  "addu $29, $29, 32 \n\t" \
5394  "lw $28, 0($29) \n\t" \
5395  "lw $31, 4($29) \n\t" \
5396  "addu $29, $29, 8 \n\t" \
5397  "move %0, $2\n" \
5398  : /*out*/ "=r" (_res) \
5399  : /*in*/ "0" (&_argvec[0]) \
5400  : /*trash*/ "memory", __CALLER_SAVED_REGS \
5401  ); \
5402  lval = (__typeof__(lval)) _res; \
5403  } while (0)
5404 
/* mips32 (o32): call an 8-arg word function.  Args 1-4 in $4..$7;
   args 5-8 stored at 16..28($29) in a fresh 40-byte stack area.
   gp/ra saved around the call; result taken from $2. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" /* save gp */ \
         "sw $31, 4($29) \n\t" /* save ra */ \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 40\n\t" \
         "sw $4, 16($29) \n\t" /* arg5 */ \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" /* arg6 */ \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" /* arg7 */ \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" /* arg8 */ \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 40 \n\t" \
         "lw $28, 0($29) \n\t" /* restore gp */ \
         "lw $31, 4($29) \n\t" /* restore ra */ \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5450 
/* mips32 (o32): call a 9-arg word function.  Args 1-4 in $4..$7;
   args 5-9 stored at 16..32($29) in a fresh 40-byte stack area.
   gp/ra saved around the call; result taken from $2. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" /* save gp */ \
         "sw $31, 4($29) \n\t" /* save ra */ \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 40\n\t" \
         "sw $4, 16($29) \n\t" /* arg5 */ \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" /* arg6 */ \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" /* arg7 */ \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" /* arg8 */ \
         "lw $4, 36(%1) \n\t" \
         "sw $4, 32($29) \n\t" /* arg9 */ \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 40 \n\t" \
         "lw $28, 0($29) \n\t" /* restore gp */ \
         "lw $31, 4($29) \n\t" /* restore ra */ \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5499 
/* mips32 (o32): call a 10-arg word function.  Args 1-4 in $4..$7;
   args 5-10 stored at 16..36($29) in a fresh 48-byte stack area.
   gp/ra saved around the call; result taken from $2. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" /* save gp */ \
         "sw $31, 4($29) \n\t" /* save ra */ \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 48\n\t" \
         "sw $4, 16($29) \n\t" /* arg5 */ \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" /* arg6 */ \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" /* arg7 */ \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" /* arg8 */ \
         "lw $4, 36(%1) \n\t" \
         "sw $4, 32($29) \n\t" /* arg9 */ \
         "lw $4, 40(%1) \n\t" \
         "sw $4, 36($29) \n\t" /* arg10 */ \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 48 \n\t" \
         "lw $28, 0($29) \n\t" /* restore gp */ \
         "lw $31, 4($29) \n\t" /* restore ra */ \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5551 
/* mips32 (o32): call an 11-arg word function.  Args 1-4 in $4..$7;
   args 5-11 stored at 16..40($29) in a fresh 48-byte stack area.
   gp/ra saved around the call; result taken from $2. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" /* save gp */ \
         "sw $31, 4($29) \n\t" /* save ra */ \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 48\n\t" \
         "sw $4, 16($29) \n\t" /* arg5 */ \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" /* arg6 */ \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" /* arg7 */ \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" /* arg8 */ \
         "lw $4, 36(%1) \n\t" \
         "sw $4, 32($29) \n\t" /* arg9 */ \
         "lw $4, 40(%1) \n\t" \
         "sw $4, 36($29) \n\t" /* arg10 */ \
         "lw $4, 44(%1) \n\t" \
         "sw $4, 40($29) \n\t" /* arg11 */ \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 48 \n\t" \
         "lw $28, 0($29) \n\t" /* restore gp */ \
         "lw $31, 4($29) \n\t" /* restore ra */ \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5607 
/* mips32 (o32): call a 12-arg word function.  Args 1-4 in $4..$7;
   args 5-12 stored at 16..44($29) in a fresh 56-byte stack area.
   gp/ra saved around the call; result taken from $2.
   NOTE(review): uses input constraint "r" where the sibling mips32
   macros use the matching constraint "0"; both are legal here since
   %1 is fully consumed before %0 is written -- confirm the
   inconsistency is intentional against upstream valgrind.h. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" /* save gp */ \
         "sw $31, 4($29) \n\t" /* save ra */ \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 56\n\t" \
         "sw $4, 16($29) \n\t" /* arg5 */ \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" /* arg6 */ \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" /* arg7 */ \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" /* arg8 */ \
         "lw $4, 36(%1) \n\t" \
         "sw $4, 32($29) \n\t" /* arg9 */ \
         "lw $4, 40(%1) \n\t" \
         "sw $4, 36($29) \n\t" /* arg10 */ \
         "lw $4, 44(%1) \n\t" \
         "sw $4, 40($29) \n\t" /* arg11 */ \
         "lw $4, 48(%1) \n\t" \
         "sw $4, 44($29) \n\t" /* arg12 */ \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 56 \n\t" \
         "lw $28, 0($29) \n\t" /* restore gp */ \
         "lw $31, 4($29) \n\t" /* restore ra */ \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5666 
5667 #endif /* PLAT_mips32_linux */
5668 
5669 /* ------------------------- mips64-linux ------------------------- */
5670 
5671 #if defined(PLAT_mips64_linux)
5672 
/* These regs are trashed by the hidden call.
   NOTE(review): covers the return-value, argument and temporary
   registers plus $31 (ra); the exact ABI names of $8..$15 differ
   between the o32 and n64 conventions. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
5677 
/* These CALL_FN_ macros assume that on mips64-linux, sizeof(unsigned
   long) == 8. */
5680 
/* mips64 (n64): call a 0-arg function.  Target address is loaded from
   _argvec[0] into $25 (t9); result taken from $2.
   NOTE(review): uses the matching constraint "0" where the sibling
   mips64 macros use "r" -- both are legal; confirm against upstream. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5697 
/* mips64 (n64): call a 1-arg function; arg1 in $4, target in $25 (t9),
   result from $2. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" /* arg1*/ \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5716 
/* mips64 (n64): call a 2-arg function; args in $4,$5, target in $25
   (t9), result from $2. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5737 
/* mips64 (n64): call a 3-arg function; args in $4..$6, target in $25
   (t9), result from $2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5760 
/* mips64 (n64): call a 4-arg function; args in $4..$7, target in $25
   (t9), result from $2. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5785 
/* mips64 (n64): call a 5-arg function; args in $4..$8, target in $25
   (t9), result from $2. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5812 
/* mips64 (n64): call a 6-arg function; args in $4..$9, target in $25
   (t9), result from $2. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5841 
/* mips64 (n64): call a 7-arg function; args in $4..$10, target in $25
   (t9), result from $2. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5873 
/* mips64 (n64): call an 8-arg function; args in $4..$11, target in $25
   (t9), result from $2. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5907 
/* mips64 (n64): call a 9-arg function.  Args 1-8 in $4..$11; arg 9 is
   stored at 0($29) in an 8-byte stack area lowered for the call.
   Target in $25 (t9); result from $2. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         "dsubu $29, $29, 8\n\t" \
         "ld $4, 72(%1)\n\t" \
         "sd $4, 0($29)\n\t" /* arg9 -> stack */ \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "daddu $29, $29, 8\n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5946 
/* mips64 (n64): call a 10-arg function.  Args 1-8 in $4..$11; args
   9-10 stored at 0/8($29) in a 16-byte stack area lowered for the
   call.  Target in $25 (t9); result from $2. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         "dsubu $29, $29, 16\n\t" \
         "ld $4, 72(%1)\n\t" \
         "sd $4, 0($29)\n\t" /* arg9 */ \
         "ld $4, 80(%1)\n\t" \
         "sd $4, 8($29)\n\t" /* arg10 */ \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "daddu $29, $29, 16\n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5988 
/* mips64 (n64): call an 11-arg function.  Args 1-8 in $4..$11; args
   9-11 stored at 0/8/16($29) in a 24-byte stack area lowered for the
   call.  Target in $25 (t9); result from $2. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         "dsubu $29, $29, 24\n\t" \
         "ld $4, 72(%1)\n\t" \
         "sd $4, 0($29)\n\t" /* arg9 */ \
         "ld $4, 80(%1)\n\t" \
         "sd $4, 8($29)\n\t" /* arg10 */ \
         "ld $4, 88(%1)\n\t" \
         "sd $4, 16($29)\n\t" /* arg11 */ \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "daddu $29, $29, 24\n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
6034 
/* mips64 (n64): call a 12-arg function.  Args 1-8 in $4..$11; args
   9-12 stored at 0/8/16/24($29) in a 32-byte stack area lowered for
   the call.  Target in $25 (t9); result from $2. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         "dsubu $29, $29, 32\n\t" \
         "ld $4, 72(%1)\n\t" \
         "sd $4, 0($29)\n\t" /* arg9 */ \
         "ld $4, 80(%1)\n\t" \
         "sd $4, 8($29)\n\t" /* arg10 */ \
         "ld $4, 88(%1)\n\t" \
         "sd $4, 16($29)\n\t" /* arg11 */ \
         "ld $4, 96(%1)\n\t" \
         "sd $4, 24($29)\n\t" /* arg12 */ \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "daddu $29, $29, 32\n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
6083 
6084 #endif /* PLAT_mips64_linux */
6085 
6086 
6087 /* ------------------------------------------------------------------ */
6088 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
6089 /* */
6090 /* ------------------------------------------------------------------ */
6091 
6092 /* Some request codes. There are many more of these, but most are not
6093  exposed to end-user view. These are the public ones, all of the
6094  form 0x1000 + small_number.
6095 
6096  Core ones are in the range 0x00000000--0x0000ffff. The non-public
6097  ones start at 0x2000.
6098 */
6099 
/* These macros are used by tools -- they must be public, but don't
   embed them into other programs. */
/* Build a tool's user-request base code from its two-character tool
   id.  The byte values are widened to unsigned before shifting:
   `((a)&0xff) << 24` would left-shift into the sign bit of a 32-bit
   int whenever (a)&0xff >= 0x80, which is undefined behaviour in C
   (C99 6.5.7).  The resulting value is identical for all inputs. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)((((unsigned int)(a) & 0xffu) << 24) \
                   | (((unsigned int)(b) & 0xffu) << 16)))
/* True iff request code `v` belongs to the tool identified by (a,b),
   i.e. the top 16 bits of `v` match that tool's base code. */
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
6106 
6107 /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
6108  This enum comprises an ABI exported by Valgrind to programs
6109  which use client requests. DO NOT CHANGE THE ORDER OF THESE
6110  ENTRIES, NOR DELETE ANY -- add new ones at the end. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a monitor
             command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
          /* 0x130b is out of numeric order here: per the ABI warning
             above, new entries are appended by value, not reordered. */
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK   = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL   = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL  = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC    = 0x1305,
          VG_USERREQ__MEMPOOL_FREE     = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM     = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL     = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE   = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS   = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Initialize IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901
   } Vg_ClientRequest;
6183 
/* Non-GNU compilers don't understand the GNU __extension__ keyword;
   make it expand to nothing so the code below still parses. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif
6187 
6188 
6189 /* Returns the number of Valgrinds this code is running under. That
6190  is, 0 if running natively, 1 if running under Valgrind, 2 if
6191  running under Valgrind which is running under another Valgrind,
6192  etc. */
/* Expands to an unsigned expression; 0 when running natively (the
   default return of the client request). */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)
6197 
6198 
6199 /* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
6200  _qzz_len - 1]. Useful if you are debugging a JITter or some such,
6201  since it provides a way to make sure valgrind will retranslate the
6202  invalidated area. Returns no value. */
/* NOTE(review): _qzz_addr/_qzz_len are forwarded unparenthesized;
   callers passing comma-free expressions are fine, but confirm no
   caller passes anything with lower-precedence operators. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
                                    _qzz_addr, _qzz_len, 0, 0, 0)
6206 
6207 
6208 /* These requests are for getting Valgrind itself to print something.
6209  Possibly with a backtrace. This is a really ugly hack. The return value
6210  is the number of characters printed, excluding the "**<pid>** " part at the
6211  start and the backtrace (if present). */
6212 
/* Print a printf-style message to the Valgrind log via the
   PRINTF_VALIST_BY_REF client request.  Returns the number of
   characters printed (see the note above); compiles to `return 0`
   when NVALGRIND is defined.
   NOTE(review): in the #if below, `&&` binds tighter than `||`, so it
   reads __GNUC__ || (__INTEL_COMPILER && !_MSC_VER); apparently
   intentional (ICC on Windows defines _MSC_VER and must not see
   __attribute__) -- confirm against upstream valgrind.h. */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
/* Modern GCC will optimize the static routine out if unused,
   and unused attribute will shut down warnings about it. */
static int VALGRIND_PRINTF(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF(const char *format, ...)
{
#if defined(NVALGRIND)
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
#if defined(_MSC_VER) || defined(__MINGW64__)
   /* va_list is passed by reference (pointer), sized as uintptr_t. */
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}
6252 
/* Like VALGRIND_PRINTF, but Valgrind also appends a stack backtrace
   to the logged message (PRINTF_BACKTRACE_VALIST_BY_REF request).
   Returns the number of characters printed, excluding the backtrace;
   compiles to `return 0` when NVALGRIND is defined. */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
{
#if defined(NVALGRIND)
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
#if defined(_MSC_VER) || defined(__MINGW64__)
   /* va_list is passed by reference (pointer), sized as uintptr_t. */
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}
6290 
6291 
/* These requests allow control to move from the simulated CPU to the
   real CPU, calling an arbitrary function.

   Note that the current ThreadId is inserted as the first argument.
   So this call:

     VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)

   requires f to have this signature:

     Word f(Word tid, Word arg1, Word arg2)

   where "Word" is a word-sized type.

   Note that these client requests are not entirely reliable.  For example,
   if you call a function with them that subsequently calls printf(),
   there's a high chance Valgrind will crash.  Generally, your prospects of
   these working are made higher if the called function does not refer to
   any global variables, and does not refer to any libc or other functions
   (printf et al).  Any kind of entanglement with libc or dynamic linking is
   likely to have a bad outcome, for tricky reasons which we've grappled
   with a lot in the past.
*/

/* Call _qyy_fn(tid) on the real CPU; evaluates to its return value
   (0 when not running under Valgrind). */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

/* Call _qyy_fn(tid, _qyy_arg1) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)

/* Call _qyy_fn(tid, _qyy_arg1, _qyy_arg2) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

/* Call _qyy_fn(tid, _qyy_arg1, _qyy_arg2, _qyy_arg3) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL3,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2,         \
                                    _qyy_arg3, 0)
6340 
/* Counts the number of errors that have been recorded by a tool.  Nb:
   the tool must record the errors with VG_(maybe_record_error)() or
   VG_(unique_error)() for them to be counted.  Evaluates to 0 when not
   running under Valgrind (the client request's default return). */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
6349 
/* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
   when heap blocks are allocated in order to give accurate results.  This
   happens automatically for the standard allocator functions such as
   malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
   delete[], etc.

   But if your program uses a custom allocator, this doesn't automatically
   happen, and Valgrind will not do as well.  For example, if you allocate
   superblocks with mmap() and then allocate chunks of the superblocks, all
   Valgrind's observations will be at the mmap() level and it won't know that
   the chunks should be considered separate entities.  In Memcheck's case,
   that means you probably won't get heap block overrun detection (because
   there won't be redzones marked as unaddressable) and you definitely won't
   get any leak detection.

   The following client requests allow a custom allocator to be annotated so
   that it can be handled accurately by Valgrind.

   VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
   by a malloc()-like function.  For Memcheck (an illustrative case), this
   does two things:

   - It records that the block has been allocated.  This means any addresses
     within the block mentioned in error messages will be
     identified as belonging to the block.  It also means that if the block
     isn't freed it will be detected by the leak checker.

   - It marks the block as being addressable and undefined (if 'is_zeroed' is
     not set), or addressable and defined (if 'is_zeroed' is set).  This
     controls how accesses to the block by the program are handled.

   'addr' is the start of the usable block (ie. after any
   redzone), 'sizeB' is its size.  'rzB' is the redzone size if the allocator
   can apply redzones -- these are blocks of padding at the start and end of
   each block.  Adding redzones is recommended as it makes it much more likely
   Valgrind will spot block overruns.  `is_zeroed' indicates if the memory is
   zeroed (or filled with another predictable value), as is the case for
   calloc().

   VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
   heap block -- that will be used by the client program -- is allocated.
   It's best to put it at the outermost level of the allocator if possible;
   for example, if you have a function my_alloc() which calls
   internal_alloc(), and the client request is put inside internal_alloc(),
   stack traces relating to the heap block will contain entries for both
   my_alloc() and internal_alloc(), which is probably not what you want.

   For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
   custom blocks from within a heap block, B, that has been allocated with
   malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
   -- the custom blocks will take precedence.

   VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK.  For
   Memcheck, it does two things:

   - It records that the block has been deallocated.  This assumes that the
     block was annotated as having been allocated via
     VALGRIND_MALLOCLIKE_BLOCK.  Otherwise, an error will be issued.

   - It marks the block as being unaddressable.

   VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
   heap block is deallocated.

   VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation.  For
   Memcheck, it does four things:

   - It records that the size of a block has been changed.  This assumes that
     the block was annotated as having been allocated via
     VALGRIND_MALLOCLIKE_BLOCK.  Otherwise, an error will be issued.

   - If the block shrunk, it marks the freed memory as being unaddressable.

   - If the block grew, it marks the new area as undefined and defines a red
     zone past the end of the new block.

   - The V-bits of the overlap between the old and the new block are preserved.

   VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
   and before deallocation of the old block.

   In many cases, these three client requests will not be enough to get your
   allocator working well with Memcheck.  More specifically, if your allocator
   writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
   will be necessary to mark the memory as addressable just before the zeroing
   occurs, otherwise you'll get a lot of invalid write errors.  For example,
   you'll need to do this if your allocator recycles freed blocks, but it
   zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
   Alternatively, if your allocator reuses freed blocks for allocator-internal
   data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.

   Really, what's happening is a blurring of the lines between the client
   program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
   memory should be considered unaddressable to the client program, but the
   allocator knows more than the rest of the client program and so may be able
   to safely access it.  Extra client requests are necessary for Valgrind to
   understand the distinction between the allocator and the rest of the
   program.

   Ignored if addr == 0.
*/
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)    \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
                                    addr, sizeB, rzB, is_zeroed, 0)

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
                                    addr, oldSizeB, newSizeB, rzB, 0)

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                        \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,   \
                                    addr, rzB, 0, 0, 0)
6468 
/* Create a memory pool anchored at 'pool', with redzone size 'rzB'.
   'is_zeroed' indicates whether chunks handed out from the pool are
   zero-filled. */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,   \
                                    pool, rzB, is_zeroed, 0, 0)

/* Destroy a memory pool. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                            \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,  \
                                    pool, 0, 0, 0, 0)

/* Associate a piece of memory with a memory pool. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,    \
                                    pool, addr, size, 0, 0)

/* Disassociate a piece of memory from a memory pool. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,     \
                                    pool, addr, 0, 0, 0)

/* Disassociate any pieces outside a particular range. */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,     \
                                    pool, addr, size, 0, 0)

/* Tell the tool that the pool previously anchored at poolA has moved
   to poolB.  (The old comment here, "Resize and/or move a piece...",
   was a copy-paste of the VALGRIND_MEMPOOL_CHANGE comment and did not
   describe this request.) */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                       \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,     \
                                    poolA, poolB, 0, 0, 0)

/* Resize and/or move a piece associated with a memory pool:
   the piece at addrA becomes the piece at addrB with the given size. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,   \
                                    pool, addrA, addrB, size, 0)

/* Return 1 if a mempool exists, else 0. */
#define VALGRIND_MEMPOOL_EXISTS(pool)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MEMPOOL_EXISTS,        \
                               pool, 0, 0, 0, 0)
6509 
/* Mark a piece of memory as being a stack.  Returns a stack id.
   start is the lowest addressable stack byte, end is the highest
   addressable stack byte.  Evaluates to 0 when not running under
   Valgrind (the client request's default return). */
#define VALGRIND_STACK_REGISTER(start, end)                       \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__STACK_REGISTER,        \
                               start, end, 0, 0, 0)

/* Unmark the piece of memory associated with a stack id as being a
   stack.  'id' is a value previously returned by
   VALGRIND_STACK_REGISTER. */
#define VALGRIND_STACK_DEREGISTER(id)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
                                    id, 0, 0, 0, 0)

/* Change the start and end address of the stack id.
   start is the new lowest addressable stack byte, end is the new highest
   addressable stack byte. */
#define VALGRIND_STACK_CHANGE(id, start, end)                     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,     \
                                    id, start, end, 0, 0)

/* Load PDB debug info for Wine PE image_map. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
                                    fd, ptr, total_size, delta, 0)

/* Map a code address to a source file name and line number.  buf64
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                    \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,      \
                               addr, buf64, 0, 0, 0)
6544 
/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled. */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    1, 0, 0, 0, 0)

/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING.  (Same request as above, with a
   delta of -1 to decrement the disablement count.) */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    -1, 0, 0, 0, 0)

/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Returns 1 if command not recognized, 0 otherwise. */
#define VALGRIND_MONITOR_COMMAND(command)                               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
                                    command, 0, 0, 0, 0)
6571 
6572 
/* Undefine the internal per-platform selection macros so they do not
   leak into code that includes this header. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux

#endif   /* __VALGRIND_H */