summaryrefslogtreecommitdiffstats
path: root/3rdparty/valgrind.h
diff options
context:
space:
mode:
Diffstat (limited to '3rdparty/valgrind.h')
-rw-r--r--3rdparty/valgrind.h5960
1 files changed, 5960 insertions, 0 deletions
diff --git a/3rdparty/valgrind.h b/3rdparty/valgrind.h
new file mode 100644
index 0000000..21caa70
--- /dev/null
+++ b/3rdparty/valgrind.h
@@ -0,0 +1,5960 @@
1/* -*- c -*-
2 ----------------------------------------------------------------
3
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
9
10 ----------------------------------------------------------------
11
12 This file is part of Valgrind, a dynamic binary instrumentation
13 framework.
14
15 Copyright (C) 2000-2013 Julian Seward. All rights reserved.
16
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
19 are met:
20
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
23
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
28
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
31
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
34 permission.
35
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
47
48 ----------------------------------------------------------------
49
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
54
55 ----------------------------------------------------------------
56*/
57
58
59/* This file is for inclusion into client (your!) code.
60
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
63
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66 unchanged. When not running on valgrind, each client request
67 consumes very few (eg. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
72
/* Guard against multiple inclusion. */
#ifndef __VALGRIND_H
#define __VALGRIND_H


/* ------------------------------------------------------------------ */
/* VERSION NUMBER OF VALGRIND                                         */
/* ------------------------------------------------------------------ */

/* Specify Valgrind's version number, so that user code can
   conditionally compile based on our version number.  Note that these
   were introduced at version 3.6 and so do not exist in version 3.5
   or earlier.  The recommended way to use them to check for "version
   X.Y or later" is (eg)

#if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__)   \
    && (__VALGRIND_MAJOR__ > 3                                   \
        || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
#define __VALGRIND_MAJOR__    3
#define __VALGRIND_MINOR__    10


#include <stdarg.h>
/* NOTE(review): HAVE_STDINT_H is presumably supplied by the including
   project's build system (e.g. autoconf) -- confirm; <stdint.h> is only
   needed below for uintptr_t in the MSVC/x86 code path. */
#ifdef HAVE_STDINT_H
#if HAVE_STDINT_H
#include <stdint.h>
#endif
#endif
101
102
103/* Nb: this file might be included in a file compiled with -ansi. So
104 we can't use C++ style "//" comments nor the "asm" keyword (instead
105 use "__asm__"). */
106
107/* Derive some tags indicating what the target platform is. Note
108 that in this file we're using the compiler's CPP symbols for
109 identifying architectures, which are different to the ones we use
110 within the rest of Valgrind. Note, __powerpc__ is active for both
111 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
112 latter (on Linux, that is).
113
114 Misc note: how to find out what's predefined in gcc by default:
115 gcc -Wp,-dM somefile.c
116*/
/* Start from a clean slate: at most one PLAT_* tag is defined below. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux


/* Map the compiler's predefined CPU/OS symbols onto our PLAT_* tags.
   The order matters only in that each test is mutually exclusive with
   the ones before it. */
#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif (defined(__MINGW32__) && !defined(__MINGW64__)) \
    || defined(__CYGWIN32__) \
    || (defined(_WIN32) && defined(_M_IX86))
#  define PLAT_x86_win32 1
#elif defined(__MINGW64__) \
    || (defined(_WIN64) && defined(_M_X64))
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__)
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__)
#  define PLAT_ppc64_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
#  define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips!=64)
#  define PLAT_mips32_linux 1
#else
/* If we're not compiling for our target platform, don't generate
   any inline asms.  */
#  if !defined(NVALGRIND)
#    define NVALGRIND 1
#  endif
#endif
168
/* XXX: Unfortunately x64 Visual C++ does not support inline asms,
 * so disable the use of valgrind's inline asms for x64 Visual C++
 * builds, so that x64 Visual C++ builds of GLib can be maintained
 */
#if defined (PLAT_amd64_win64) && defined (_MSC_VER)
#  if !defined(NVALGRIND)
#    define NVALGRIND 1
#  endif
#endif
178
179
180/* ------------------------------------------------------------------ */
181/* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
182/* in here of use to end-users -- skip to the next section. */
183/* ------------------------------------------------------------------ */
184
185/*
186 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
187 * request. Accepts both pointers and integers as arguments.
188 *
189 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
190 * client request that does not return a value.
191
192 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
193 * client request and whose value equals the client request result. Accepts
194 * both pointers and integers as arguments. Note that such calls are not
195 * necessarily pure functions -- they may have side effects.
196 */
197
/* Invoke a client request and assign its result to _zzq_rlval.  Thin
   statement wrapper over VALGRIND_DO_CLIENT_REQUEST_EXPR below. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                _zzq_request, _zzq_arg1, _zzq_arg2,                     \
                _zzq_arg3, _zzq_arg4, _zzq_arg5)                        \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

/* Invoke a client request and discard its result (default 0 is used
   when running natively). */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)             \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                    (_zzq_request), (_zzq_arg1), (_zzq_arg2),           \
                    (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
210
#if defined(NVALGRIND)

/* Define NVALGRIND to completely remove the Valgrind magic sequence
   from the compiled code (analogous to NDEBUG's effects on
   assert()).  With client requests compiled out, the expression
   simply yields the caller-supplied default value. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
      (_zzq_default)
220
221#else /* ! NVALGRIND */
222
223/* The following defines the magic code sequences which the JITter
224 spots and handles magically. Don't look too closely at them as
225 they will rot your brain.
226
   The assembly code sequences for all architectures are in this one
228 file. This is because this file must be stand-alone, and we don't
229 want to have multiple files.
230
231 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
232 value gets put in the return slot, so that everything works when
233 this is executed not under Valgrind. Args are passed in a memory
234 block, and so there's no intrinsic limit to the number that could
235 be passed, but it's currently five.
236
237 The macro args are:
238 _zzq_rlval result lvalue
239 _zzq_default default value (result returned when running on real CPU)
240 _zzq_request request code
241 _zzq_arg1..5 request params
242
243 The other two macros are used to support function wrapping, and are
244 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
245 guest's NRADDR pseudo-register and whatever other information is
246 needed to safely run the call original from the wrapper: on
247 ppc64-linux, the R2 value at the divert point is also needed. This
248 information is abstracted into a user-visible type, OrigFn.
249
250 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
251 guest, but guarantees that the branch instruction will not be
252 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
253 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
254 complete inline asm, since it needs to be combined with more magic
255 inline asm stuff to be useful.
256*/
257
258/* ------------------------- x86-{linux,darwin} ---------------- */
259
#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)  \
    ||  (defined(PLAT_x86_win32) && defined(__GNUC__))

/* Context needed to call the original of a wrapped function; on x86
   only its entry address ("nraddr") is required. */
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotations of %edi summing to 64 bits: architecturally a no-op, but
   this exact sequence is what the Valgrind JIT pattern-matches (see
   the "magic code sequences" comment above).  Do not alter it. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3, %%edi ; roll $13, %%edi\n\t"       \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

/* Args are passed via a 6-word memory block pointed to by %eax; the
   result (or the default, when running natively) comes back in %edx. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

/* Fetch the guest NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment (not a complete statement) for a non-redirected
   "call *%eax"; combined with more asm by the CALL_FN_* macros. */
#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgl %%edi,%%edi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */
322
323/* ------------------------- x86-Win32 ------------------------- */
324
#if defined(PLAT_x86_win32) && !defined(__GNUC__)

/* Context needed to call the original of a wrapped function; on x86
   only its entry address ("nraddr") is required. */
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

/* MSVC-syntax equivalent of the GCC preamble above: rotations of edi
   summing to 64 bits, pattern-matched by the Valgrind JIT. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
     __asm rol edi, 3  __asm rol edi, 13                          \
     __asm rol edi, 29 __asm rol edi, 19

/* MSVC cannot express this as a statement-expression, so forward to a
   real helper function instead. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default),    \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1),        \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3),           \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

/* Helper for the macro above: packs the request and its five args into
   a 6-slot block, points eax at it, and retrieves the result (or the
   caller's default, when running natively) from edx. */
static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

/* Fetch the guest NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
      volatile unsigned int __addr;                               \
      __asm { __SPECIAL_INSTRUCTION_PREAMBLE                      \
              /* %EAX = guest_NRADDR */                           \
              __asm xchg ecx,ecx                                  \
              __asm mov __addr, eax                               \
      }                                                           \
      _zzq_orig->nraddr = __addr;                                 \
    }

/* Deliberately unusable: expanding this is a compile error, because a
   no-redirect call cannot be expressed with MSVC inline asm here. */
#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm { __SPECIAL_INSTRUCTION_PREAMBLE                        \
            __asm xchg edi,edi                                    \
    }                                                             \
 } while (0)

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */
395
396/* ------------------------ amd64-{linux,darwin} --------------- */
397
#if defined(PLAT_amd64_linux)  ||  defined(PLAT_amd64_darwin)  \
    ||  (defined(PLAT_amd64_win64) && defined(__GNUC__))

/* Context needed to call the original of a wrapped function; on amd64
   only its entry address ("nraddr") is required. */
typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotations of %rdi summing to 128 bits: architecturally a no-op, but
   this exact sequence is what the Valgrind JIT pattern-matches. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3, %%rdi ; rolq $13, %%rdi\n\t"       \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

/* Args are passed via a 6-slot memory block pointed to by %rax; the
   result (or the default, when running natively) comes back in %rdx. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    __extension__                                                 \
    ({ volatile unsigned long long int _zzq_args[6];              \
       volatile unsigned long long int _zzq_result;               \
       _zzq_args[0] = (unsigned long long int)(_zzq_request);     \
       _zzq_args[1] = (unsigned long long int)(_zzq_arg1);        \
       _zzq_args[2] = (unsigned long long int)(_zzq_arg2);        \
       _zzq_args[3] = (unsigned long long int)(_zzq_arg3);        \
       _zzq_args[4] = (unsigned long long int)(_zzq_arg4);        \
       _zzq_args[5] = (unsigned long long int)(_zzq_arg5);        \
       __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE            \
                        /* %RDX = client_request ( %RAX ) */      \
                        "xchgq %%rbx,%%rbx"                       \
                        : "=d" (_zzq_result)                      \
                        : "a" (&_zzq_args[0]), "0" (_zzq_default) \
                        : "cc", "memory"                          \
                       );                                         \
       _zzq_result;                                               \
    })

/* Fetch the guest NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
      volatile unsigned long long int __addr;                     \
      __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE             \
                       /* %RAX = guest_NRADDR */                  \
                       "xchgq %%rcx,%%rcx"                        \
                       : "=a" (__addr)                            \
                       :                                          \
                       : "cc", "memory"                           \
                      );                                          \
      _zzq_orig->nraddr = __addr;                                 \
    }

/* Asm fragment (not a complete statement) for a non-redirected
   "call *%rax". */
#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgq %%rdi,%%rdi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
460
461/* ------------------------- amd64-Win64 ------------------------- */
462
/* Non-GCC compilers on Win64 have no inline-asm support for the magic
   sequences.  (MSVC never reaches this: NVALGRIND is forced on for
   PLAT_amd64_win64 && _MSC_VER earlier in this file.) */
#if defined(PLAT_amd64_win64) && !defined(__GNUC__)

#error Unsupported compiler.

#endif /* PLAT_amd64_win64 */
468
469/* ------------------------ ppc32-linux ------------------------ */
470
#if defined(PLAT_ppc32_linux)

/* Context needed to call the original of a wrapped function; on ppc32
   only its entry address ("nraddr") is required. */
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotations of r0 (back into r0) summing to 64 bits: architecturally a
   no-op, but this exact sequence is what the Valgrind JIT matches. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
                     "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

/* Default goes in r3, pointer to the 6-word args block in r4; the
   result comes back in r3. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({         unsigned int  _zzq_args[6];                          \
             unsigned int  _zzq_result;                           \
             unsigned int* _zzq_ptr;                              \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

/* Fetch the guest NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment (not a complete statement) for a non-redirected
   branch-and-link through r11. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc32_linux */
537
538/* ------------------------ ppc64-linux ------------------------ */
539
#if defined(PLAT_ppc64_linux)

/* Context needed to call the original of a wrapped function; ppc64
   additionally needs the R2 (TOC pointer) value at the divert point,
   per the comment earlier in this file. */
typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
      unsigned long long int r2;  /* what tocptr do we need? */
   }
   OrigFn;

/* Rotations of r0 summing to 128 bits: architecturally a no-op, but
   this exact sequence is what the Valgrind JIT pattern-matches. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

/* Default goes in r3, pointer to the 6-slot args block in r4; the
   result comes back in r3. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({         unsigned long long int  _zzq_args[6];                \
             unsigned long long int  _zzq_result;                 \
             unsigned long long int* _zzq_ptr;                    \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

/* Fetch both the guest NRADDR pseudo-register and the guest R2 (TOC)
   value into the OrigFn. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long long int __addr;                                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

/* Asm fragment (not a complete statement) for a non-redirected
   branch-and-link through r11. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64_linux */
616
617/* ------------------------- arm-linux ------------------------- */
618
#if defined(PLAT_arm_linux)

/* Context needed to call the original of a wrapped function; on arm
   only its entry address ("nraddr") is required. */
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotations of r12 summing to 64 bits: architecturally a no-op, but
   this exact sequence is what the Valgrind JIT pattern-matches. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t"   \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

/* Default goes in r3, pointer to the 6-word args block in r4; the
   result comes back in r3. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("mov r3, %1\n\t" /*default*/                 \
                     "mov r4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = client_request ( R4 ) */             \
                     "orr r10, r10, r10\n\t"                      \
                     "mov %0, r3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "r3", "r4");                \
    _zzq_result;                                                  \
  })

/* Fetch the guest NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = guest_NRADDR */                      \
                     "orr r11, r11, r11\n\t"                      \
                     "mov %0, r3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment (not a complete statement) for a non-redirected
   branch-and-link through r4. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr r9, r9, r9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm_linux */
684
685/* ------------------------ arm64-linux ------------------------- */
686
#if defined(PLAT_arm64_linux)

/* Context needed to call the original of a wrapped function; on arm64
   only its entry address ("nraddr") is required. */
typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotations of x12 summing to 128 bits: architecturally a no-op, but
   this exact sequence is what the Valgrind JIT pattern-matches. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "ror x12, x12, #3 ; ror x12, x12, #13 \n\t"           \
            "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"

/* Default goes in x3, pointer to the 6-slot args block in x4; the
   result comes back in x3. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned long long int _zzq_args[6];                 \
    volatile unsigned long long int _zzq_result;                  \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
    __asm__ volatile("mov x3, %1\n\t" /*default*/                 \
                     "mov x4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = client_request ( X4 ) */             \
                     "orr x10, x10, x10\n\t"                      \
                     "mov %0, x3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "x3", "x4");                \
    _zzq_result;                                                  \
  })

/* Fetch the guest NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long long int __addr;                                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = guest_NRADDR */                      \
                     "orr x11, x11, x11\n\t"                      \
                     "mov %0, x3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "x3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment (not a complete statement) for a non-redirected
   branch-and-link through x8. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir X8 */          \
                     "orr x12, x12, x12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr x9, x9, x9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm64_linux */
752
753/* ------------------------ s390x-linux ------------------------ */
754
#if defined(PLAT_s390x_linux)

/* Context needed to call the original of a wrapped function; on s390x
   only its entry address ("nraddr") is required. */
typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;

/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code. This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_decoder.c).
 * Each "lr Rn,Rn" is a register-to-itself load, i.e. a no-op on real
 * hardware; only the exact sequence matters to Valgrind.
 */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "lr 15,15\n\t"                               \
                     "lr 1,1\n\t"                                 \
                     "lr 2,2\n\t"                                 \
                     "lr 3,3\n\t"

/* One trailing no-op per operation selects which request follows the
   preamble. */
#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"

/* Args pointer goes in r2, default in r3; the result (or the default,
   when running natively) comes back in r3. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
 ({volatile unsigned long long int _zzq_args[6];                  \
   volatile unsigned long long int _zzq_result;                   \
   _zzq_args[0] = (unsigned long long int)(_zzq_request);         \
   _zzq_args[1] = (unsigned long long int)(_zzq_arg1);            \
   _zzq_args[2] = (unsigned long long int)(_zzq_arg2);            \
   _zzq_args[3] = (unsigned long long int)(_zzq_arg3);            \
   _zzq_args[4] = (unsigned long long int)(_zzq_arg4);            \
   _zzq_args[5] = (unsigned long long int)(_zzq_arg5);            \
   __asm__ volatile(/* r2 = args */                               \
                    "lgr 2,%1\n\t"                                \
                    /* r3 = default */                            \
                    "lgr 3,%2\n\t"                                \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    __CLIENT_REQUEST_CODE                         \
                    /* results = r3 */                            \
                    "lgr %0, 3\n\t"                               \
                    : "=d" (_zzq_result)                          \
                    : "a" (&_zzq_args[0]), "0" (_zzq_default)     \
                    : "cc", "2", "3", "memory"                    \
                   );                                             \
   _zzq_result;                                                   \
 })

/* Fetch the guest NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                    \
   volatile unsigned long long int __addr;                        \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                    __GET_NR_CONTEXT_CODE                         \
                    "lgr %0, 3\n\t"                               \
                    : "=a" (__addr)                               \
                    :                                             \
                    : "cc", "3", "memory"                         \
                   );                                             \
   _zzq_orig->nraddr = __addr;                                    \
 }

/* Asm fragment (not a complete statement) for a non-redirected call
   through r1. */
#define VALGRIND_CALL_NOREDIR_R1                                  \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     __VEX_INJECT_IR_CODE);                       \
 } while (0)

#endif /* PLAT_s390x_linux */
829
830/* ------------------------- mips32-linux ---------------- */
831
#if defined(PLAT_mips32_linux)

/* Context needed to call the original of a wrapped function; on mips32
   only its entry address ("nraddr") is required. */
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* .word 0x342
 * .word 0x742
 * .word 0xC2
 * .word 0x4C2*/
/* Shifts of $0 (the hardwired zero register) are no-ops on real
   hardware; only the exact sequence matters to Valgrind. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "srl $0, $0, 13\n\t"                         \
                     "srl $0, $0, 29\n\t"                         \
                     "srl $0, $0, 3\n\t"                          \
                     "srl $0, $0, 19\n\t"

/* Default goes in $11 (t3), pointer to the 6-word args block in $12
   (t4); the result comes back in $11. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
     volatile unsigned int _zzq_result;                           \
     _zzq_args[0] = (unsigned int)(_zzq_request);                 \
     _zzq_args[1] = (unsigned int)(_zzq_arg1);                    \
     _zzq_args[2] = (unsigned int)(_zzq_arg2);                    \
     _zzq_args[3] = (unsigned int)(_zzq_arg3);                    \
     _zzq_args[4] = (unsigned int)(_zzq_arg4);                    \
     _zzq_args[5] = (unsigned int)(_zzq_arg5);                    \
     __asm__ volatile("move $11, %1\n\t" /*default*/              \
                      "move $12, %2\n\t" /*ptr*/                  \
                      __SPECIAL_INSTRUCTION_PREAMBLE              \
                      /* T3 = client_request ( T4 ) */            \
                      "or $13, $13, $13\n\t"                      \
                      "move %0, $11\n\t" /*result*/               \
                      : "=r" (_zzq_result)                        \
                      : "r" (_zzq_default), "r" (&_zzq_args[0])   \
                      : "$11", "$12");                            \
     _zzq_result;                                                 \
  })

/* Fetch the guest NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %t9 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11"                                      \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment (not a complete statement) for a non-redirected call
   through $t9. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%t9 */                      \
                     "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or $11, $11, $11\n\t"                       \
                    );                                            \
 } while (0)


#endif /* PLAT_mips32_linux */
902
903/* ------------------------- mips64-linux ---------------- */
904
905#if defined(PLAT_mips64_linux)
906
907typedef
908 struct {
909 unsigned long long nraddr; /* where's the code? */
910 }
911 OrigFn;
912
913/* dsll $0,$0, 3
914 * dsll $0,$0, 13
915 * dsll $0,$0, 29
916 * dsll $0,$0, 19*/
917#define __SPECIAL_INSTRUCTION_PREAMBLE \
918 "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
919 "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
920
921#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
922 _zzq_default, _zzq_request, \
923 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
924 __extension__ \
925 ({ volatile unsigned long long int _zzq_args[6]; \
926 volatile unsigned long long int _zzq_result; \
927 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
928 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
929 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
930 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
931 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
932 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
933 __asm__ volatile("move $11, %1\n\t" /*default*/ \
934 "move $12, %2\n\t" /*ptr*/ \
935 __SPECIAL_INSTRUCTION_PREAMBLE \
936 /* $11 = client_request ( $12 ) */ \
937 "or $13, $13, $13\n\t" \
938 "move %0, $11\n\t" /*result*/ \
939 : "=r" (_zzq_result) \
940 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
941 : "$11", "$12"); \
942 _zzq_result; \
943 })
944
/* Fetch the non-redirected address of the current function into
   _zzq_rlval.nraddr.  "or $14,$14,$14" is the guest_NRADDR request;
   the reply arrives in $11, hence the clobber. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long long int __addr;                       \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $11 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11");                                    \
    _zzq_orig->nraddr = __addr;                                   \
  }
957
/* NOTE: this expands to an assembly FRAGMENT, not a statement -- it is
   meant to be spliced into a larger inline-asm string.  It tells
   Valgrind to call the function whose address is in $25 ($t9) without
   applying function redirection. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir $25 */                       \
                     "or $15, $15, $15\n\t"
962
/* mips64 variant of the VEX inject-IR request marker; executes as
   plain no-ops when not running under Valgrind. */
#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     "or $11, $11, $11\n\t"                      \
                    );                                           \
 } while (0)
969
970#endif /* PLAT_mips64_linux */
971
972/* Insert assembly code for other platforms here... */
973
974#endif /* NVALGRIND */
975
976
977/* ------------------------------------------------------------------ */
978/* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
979/* ugly. It's the least-worst tradeoff I can think of. */
980/* ------------------------------------------------------------------ */
981
982/* This section defines magic (a.k.a appalling-hack) macros for doing
983 guaranteed-no-redirection macros, so as to get from function
984 wrappers to the functions they are wrapping. The whole point is to
985 construct standard call sequences, but to do the call itself with a
986 special no-redirect call pseudo-instruction that the JIT
987 understands and handles specially. This section is long and
988 repetitious, and I can't see a way to make it shorter.
989
990 The naming scheme is as follows:
991
992 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
993
994 'W' stands for "word" and 'v' for "void". Hence there are
995 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
996 and for each, the possibility of returning a word-typed result, or
997 no result.
998*/
999
1000/* Use these to write the name of your wrapper. NOTE: duplicates
1001 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
   the default behaviour equivalence class tag "0000" into the name.
1003 See pub_tool_redir.h for details -- normally you don't need to
1004 think about this, though. */
1005
1006/* Use an extra level of macroisation so as to ensure the soname/fnname
1007 args are fully macro-expanded before pasting them together. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

/* Builds the wrapper's symbol name: "_vgw" marks a wrapper, the
   all-zero digits are the default behaviour-equivalence tag, and the
   ZU/ZZ suffix selects the soname encoding (see pub_tool_redir.h for
   the ZU vs ZZ distinction). */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
1015
1016/* Use this macro from within a wrapper function to collect the
1017 context (address and possibly other info) of the original function.
1018 Once you have that you can then use it in one of the CALL_FN_
1019 macros. The type of the argument _lval is OrigFn. */
/* Simply an alias for the per-platform VALGRIND_GET_NR_CONTEXT, which
   writes the pre-redirect entry point into the OrigFn argument. */
#define VALGRIND_GET_ORIG_FN(_lval)  VALGRIND_GET_NR_CONTEXT(_lval)
1021
1022/* Also provide end-user facilities for function replacement, rather
1023 than wrapping. A replacement function differs from a wrapper in
1024 that it has no way to get hold of the original function being
1025 called, and hence no way to call onwards to it. In a replacement
1026 function, VALGRIND_GET_ORIG_FN always returns zero. */
1027
/* Replacement (not wrapper) name builders -- note "_vgr" instead of
   "_vgw".  In a replacement function VALGRIND_GET_ORIG_FN always
   yields zero (see the comment above). */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1033
1034/* Derivatives of the main macros below, for calling functions
1035 returning void. */
1036
/* Each void variant invokes the word-returning macro of the same
   arity and discards the result into the dummy _junk. */
#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_v(_junk,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_W(_junk,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1068
1069/* ------------------------- x86-{linux,darwin} ---------------- */
1070
1071#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
1072
/* These regs are trashed by the hidden call. No need to mention eax
   as gcc can already see that, plus causes gcc to bomb. */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* %edi is the scratch slot holding the saved %esp across the call,
   which is why every CALL_FN_ macro below lists "edi" as clobbered. */
#define VALGRIND_ALIGN_STACK                                      \
      "movl %%esp,%%edi\n\t"                                      \
      "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK                                    \
      "movl %%edi,%%esp\n\t"
1087
1088/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
1089 long) == 4. */
1090
/* Arity 0..3.  Arguments are fetched from _argvec[] (whose address
   arrives in %eax) and pushed right-to-left.  Where "subl $N, %%esp"
   appears, N pads the frame so that pad + pushed-bytes is a multiple
   of 16, preserving the 16-byte alignment established by
   VALGRIND_ALIGN_STACK at the point of the call. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1177
/* Arity 4..6, same scheme as the lower arities: right-to-left pushes
   from _argvec[], with a "subl" pad sized so pad + pushes stays a
   multiple of 16 bytes (four 4-byte args are exactly 16, so arity 4
   needs no pad). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1263
/* Arity 7..9: right-to-left pushes from _argvec[]; the "subl" pad is
   chosen so pad + pushed-bytes remains a multiple of 16 (eight 4-byte
   args are exactly 32, so arity 8 needs no pad). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1370
/* Arity 10..12: right-to-left pushes from _argvec[]; the "subl" pad
   keeps pad + pushed-bytes a multiple of 16 (twelve 4-byte args are
   exactly 48, so arity 12 needs no pad). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 48(%%eax)\n\t"                                    \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1497
1498#endif /* PLAT_x86_linux || PLAT_x86_darwin */
1499
1500/* ------------------------ amd64-{linux,darwin} --------------- */
1501
1502#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
1503
1504/* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1505
/* These regs are trashed by the hidden call.  %rax is omitted because
   it is already the asm output operand ("=a") in every CALL_FN_ macro
   below, so gcc can see it is written. */
#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi",       \
                            "rdi", "r8", "r9", "r10", "r11"
1509
1510/* This is all pretty complex. It's so as to make stack unwinding
1511 work reliably. See bug 243270. The basic problem is the sub and
1512 add of 128 of %rsp in all of the following macros. If gcc believes
1513 the CFA is in %rsp, then unwinding may fail, because what's at the
1514 CFA is not what gcc "expected" when it constructs the CFIs for the
1515 places where the macros are instantiated.
1516
1517 But we can't just add a CFI annotation to increase the CFA offset
1518 by 128, to match the sub of 128 from %rsp, because we don't know
1519 whether gcc has chosen %rsp as the CFA at that point, or whether it
1520 has chosen some other register (eg, %rbp). In the latter case,
1521 adding a CFI annotation to change the CFA offset is simply wrong.
1522
1523 So the solution is to get hold of the CFA using
1524 __builtin_dwarf_cfa(), put it in a known register, and add a
1525 CFI annotation to say what the register is. We choose %rbp for
1526 this (perhaps perversely), because:
1527
1528 (1) %rbp is already subject to unwinding. If a new register was
1529 chosen then the unwinder would have to unwind it in all stack
1530 traces, which is expensive, and
1531
1532 (2) %rbp is already subject to precise exception updates in the
1533 JIT. If a new register was chosen, we'd have to have precise
1534 exceptions for it too, which reduces performance of the
1535 generated code.
1536
1537 However .. one extra complication. We can't just whack the result
1538 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1539 list of trashed registers at the end of the inline assembly
1540 fragments; gcc won't allow %rbp to appear in that list. Hence
1541 instead we need to stash %rbp in %r15 for the duration of the asm,
1542 and say that %r15 is trashed instead. gcc seems happy to go with
1543 that.
1544
1545 Oh .. and this all needs to be conditionalised so that it is
1546 unchanged from before this commit, when compiled with older gccs
1547 that don't support __builtin_dwarf_cfa. Furthermore, since
1548 this header file is freestanding, it has to be independent of
1549 config.h, and so the following conditionalisation cannot depend on
1550 configure time checks.
1551
1552 Although it's not clear from
1553 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1554 this expression excludes Darwin.
1555 .cfi directives in Darwin assembly appear to be completely
1556 different and I haven't investigated how they work.
1557
1558 For even more entertainment value, note we have to use the
1559 completely undocumented __builtin_dwarf_cfa(), which appears to
1560 really compute the CFA, whereas __builtin_frame_address(0) claims
1561 to but actually doesn't. See
1562 https://bugs.kde.org/show_bug.cgi?id=243270#c47
1563*/
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
/* Extra input operand that hands the CFA (from __builtin_dwarf_cfa())
   to the asm; it is consumed as %2 by VALGRIND_CFI_PROLOGUE below. */
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
/* Park the real %rbp in %r15, load the CFA into %rbp, and tell the
   unwinder via .cfi directives; the epilogue undoes both.  See the
   long comment above for the full rationale. */
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
/* Older gcc / Darwin: no CFI support, so these expand to nothing. */
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif
1580
1581/* Macros to save and align the stack before making a function
1582 call and restore it afterwards as gcc may not keep the stack
1583 pointer aligned if it doesn't realise calls are being made
1584 to other functions. */
1585
/* %r14 holds the saved %rsp across the call, which is why every
   CALL_FN_ macro below lists "r14" as clobbered. */
#define VALGRIND_ALIGN_STACK                                      \
      "movq %%rsp,%%r14\n\t"                                      \
      "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK                                    \
      "movq %%r14,%%rsp\n\t"
1591
1592/* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1593 long) == 8. */
1594
1595/* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1596 macros. In order not to trash the stack redzone, we need to drop
1597 %rsp by 128 before the hidden call, and restore afterwards. The
   nastiness is that it is only by luck that the stack still appears
1599 to be unwindable during the hidden call - since then the behaviour
1600 of any routine using this macro does not match what the CFI data
1601 says. Sigh.
1602
1603 Why is this important? Imagine that a wrapper has a stack
1604 allocated local, and passes to the hidden call, a pointer to it.
1605 Because gcc does not know about the hidden call, it may allocate
1606 that local in the redzone. Unfortunately the hidden call may then
1607 trash it before it comes to use it. So we must step clear of the
1608 redzone, for the duration of the hidden call, to make it safe.
1609
1610 Probably the same problem afflicts the other redzone-style ABIs too
1611 (ppc64-linux); but for those, the stack is
1612 self describing (none of this CFI nonsense) so at least messing
1613 with the stack pointer doesn't give a danger of non-unwindable
1614 stack. */
1615
/* Arity 0..3: all arguments fit in registers (rdi, rsi, rdx), loaded
   from _argvec[] whose address arrives in %rax.  The "subq $128" skips
   the red zone for the duration of the hidden call -- see the comment
   above. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14",  \
           "r15"                                                  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14",  \
           "r15"                                                  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14",  \
           "r15"                                                  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14",  \
           "r15"                                                  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1711
/* Arity 4..6: rcx, r8 and r9 pick up arguments 4..6; everything is
   still passed in registers.  "subq $128" skips the red zone for the
   duration of the hidden call -- see the comment above. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14",  \
           "r15"                                                  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14",  \
           "r15"                                                  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14",  \
           "r15"                                                  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1804
/* Arity 7..8: arguments beyond the six registers go on the stack.
   For 7 args the pad is $136 (128-byte red zone + 8) so that the
   single 8-byte pushq leaves %rsp back on a 16-byte boundary; for 8
   args the two pushes total 16 bytes, so the plain $128 suffices. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14",  \
           "r15"                                                  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14",  \
           "r15"                                                  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1878
1879#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1880 arg7,arg8,arg9) \
1881 do { \
1882 volatile OrigFn _orig = (orig); \
1883 volatile unsigned long _argvec[10]; \
1884 volatile unsigned long _res; \
1885 _argvec[0] = (unsigned long)_orig.nraddr; \
1886 _argvec[1] = (unsigned long)(arg1); \
1887 _argvec[2] = (unsigned long)(arg2); \
1888 _argvec[3] = (unsigned long)(arg3); \
1889 _argvec[4] = (unsigned long)(arg4); \
1890 _argvec[5] = (unsigned long)(arg5); \
1891 _argvec[6] = (unsigned long)(arg6); \
1892 _argvec[7] = (unsigned long)(arg7); \
1893 _argvec[8] = (unsigned long)(arg8); \
1894 _argvec[9] = (unsigned long)(arg9); \
1895 __asm__ volatile( \
1896 VALGRIND_CFI_PROLOGUE \
1897 VALGRIND_ALIGN_STACK \
1898 "subq $136,%%rsp\n\t" \
1899 "pushq 72(%%rax)\n\t" \
1900 "pushq 64(%%rax)\n\t" \
1901 "pushq 56(%%rax)\n\t" \
1902 "movq 48(%%rax), %%r9\n\t" \
1903 "movq 40(%%rax), %%r8\n\t" \
1904 "movq 32(%%rax), %%rcx\n\t" \
1905 "movq 24(%%rax), %%rdx\n\t" \
1906 "movq 16(%%rax), %%rsi\n\t" \
1907 "movq 8(%%rax), %%rdi\n\t" \
1908 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1909 VALGRIND_CALL_NOREDIR_RAX \
1910 VALGRIND_RESTORE_STACK \
1911 VALGRIND_CFI_EPILOGUE \
1912 : /*out*/ "=a" (_res) \
1913 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1914 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1915 ); \
1916 lval = (__typeof__(lval)) _res; \
1917 } while (0)
1918
1919#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1920 arg7,arg8,arg9,arg10) \
1921 do { \
1922 volatile OrigFn _orig = (orig); \
1923 volatile unsigned long _argvec[11]; \
1924 volatile unsigned long _res; \
1925 _argvec[0] = (unsigned long)_orig.nraddr; \
1926 _argvec[1] = (unsigned long)(arg1); \
1927 _argvec[2] = (unsigned long)(arg2); \
1928 _argvec[3] = (unsigned long)(arg3); \
1929 _argvec[4] = (unsigned long)(arg4); \
1930 _argvec[5] = (unsigned long)(arg5); \
1931 _argvec[6] = (unsigned long)(arg6); \
1932 _argvec[7] = (unsigned long)(arg7); \
1933 _argvec[8] = (unsigned long)(arg8); \
1934 _argvec[9] = (unsigned long)(arg9); \
1935 _argvec[10] = (unsigned long)(arg10); \
1936 __asm__ volatile( \
1937 VALGRIND_CFI_PROLOGUE \
1938 VALGRIND_ALIGN_STACK \
1939 "subq $128,%%rsp\n\t" \
1940 "pushq 80(%%rax)\n\t" \
1941 "pushq 72(%%rax)\n\t" \
1942 "pushq 64(%%rax)\n\t" \
1943 "pushq 56(%%rax)\n\t" \
1944 "movq 48(%%rax), %%r9\n\t" \
1945 "movq 40(%%rax), %%r8\n\t" \
1946 "movq 32(%%rax), %%rcx\n\t" \
1947 "movq 24(%%rax), %%rdx\n\t" \
1948 "movq 16(%%rax), %%rsi\n\t" \
1949 "movq 8(%%rax), %%rdi\n\t" \
1950 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1951 VALGRIND_CALL_NOREDIR_RAX \
1952 VALGRIND_RESTORE_STACK \
1953 VALGRIND_CFI_EPILOGUE \
1954 : /*out*/ "=a" (_res) \
1955 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1956 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1957 ); \
1958 lval = (__typeof__(lval)) _res; \
1959 } while (0)
1960
1961#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1962 arg7,arg8,arg9,arg10,arg11) \
1963 do { \
1964 volatile OrigFn _orig = (orig); \
1965 volatile unsigned long _argvec[12]; \
1966 volatile unsigned long _res; \
1967 _argvec[0] = (unsigned long)_orig.nraddr; \
1968 _argvec[1] = (unsigned long)(arg1); \
1969 _argvec[2] = (unsigned long)(arg2); \
1970 _argvec[3] = (unsigned long)(arg3); \
1971 _argvec[4] = (unsigned long)(arg4); \
1972 _argvec[5] = (unsigned long)(arg5); \
1973 _argvec[6] = (unsigned long)(arg6); \
1974 _argvec[7] = (unsigned long)(arg7); \
1975 _argvec[8] = (unsigned long)(arg8); \
1976 _argvec[9] = (unsigned long)(arg9); \
1977 _argvec[10] = (unsigned long)(arg10); \
1978 _argvec[11] = (unsigned long)(arg11); \
1979 __asm__ volatile( \
1980 VALGRIND_CFI_PROLOGUE \
1981 VALGRIND_ALIGN_STACK \
1982 "subq $136,%%rsp\n\t" \
1983 "pushq 88(%%rax)\n\t" \
1984 "pushq 80(%%rax)\n\t" \
1985 "pushq 72(%%rax)\n\t" \
1986 "pushq 64(%%rax)\n\t" \
1987 "pushq 56(%%rax)\n\t" \
1988 "movq 48(%%rax), %%r9\n\t" \
1989 "movq 40(%%rax), %%r8\n\t" \
1990 "movq 32(%%rax), %%rcx\n\t" \
1991 "movq 24(%%rax), %%rdx\n\t" \
1992 "movq 16(%%rax), %%rsi\n\t" \
1993 "movq 8(%%rax), %%rdi\n\t" \
1994 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1995 VALGRIND_CALL_NOREDIR_RAX \
1996 VALGRIND_RESTORE_STACK \
1997 VALGRIND_CFI_EPILOGUE \
1998 : /*out*/ "=a" (_res) \
1999 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2000 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2001 ); \
2002 lval = (__typeof__(lval)) _res; \
2003 } while (0)
2004
2005#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2006 arg7,arg8,arg9,arg10,arg11,arg12) \
2007 do { \
2008 volatile OrigFn _orig = (orig); \
2009 volatile unsigned long _argvec[13]; \
2010 volatile unsigned long _res; \
2011 _argvec[0] = (unsigned long)_orig.nraddr; \
2012 _argvec[1] = (unsigned long)(arg1); \
2013 _argvec[2] = (unsigned long)(arg2); \
2014 _argvec[3] = (unsigned long)(arg3); \
2015 _argvec[4] = (unsigned long)(arg4); \
2016 _argvec[5] = (unsigned long)(arg5); \
2017 _argvec[6] = (unsigned long)(arg6); \
2018 _argvec[7] = (unsigned long)(arg7); \
2019 _argvec[8] = (unsigned long)(arg8); \
2020 _argvec[9] = (unsigned long)(arg9); \
2021 _argvec[10] = (unsigned long)(arg10); \
2022 _argvec[11] = (unsigned long)(arg11); \
2023 _argvec[12] = (unsigned long)(arg12); \
2024 __asm__ volatile( \
2025 VALGRIND_CFI_PROLOGUE \
2026 VALGRIND_ALIGN_STACK \
2027 "subq $128,%%rsp\n\t" \
2028 "pushq 96(%%rax)\n\t" \
2029 "pushq 88(%%rax)\n\t" \
2030 "pushq 80(%%rax)\n\t" \
2031 "pushq 72(%%rax)\n\t" \
2032 "pushq 64(%%rax)\n\t" \
2033 "pushq 56(%%rax)\n\t" \
2034 "movq 48(%%rax), %%r9\n\t" \
2035 "movq 40(%%rax), %%r8\n\t" \
2036 "movq 32(%%rax), %%rcx\n\t" \
2037 "movq 24(%%rax), %%rdx\n\t" \
2038 "movq 16(%%rax), %%rsi\n\t" \
2039 "movq 8(%%rax), %%rdi\n\t" \
2040 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2041 VALGRIND_CALL_NOREDIR_RAX \
2042 VALGRIND_RESTORE_STACK \
2043 VALGRIND_CFI_EPILOGUE \
2044 : /*out*/ "=a" (_res) \
2045 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2046 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2047 ); \
2048 lval = (__typeof__(lval)) _res; \
2049 } while (0)
2050
2051#endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
2052
2053/* ------------------------ ppc32-linux ------------------------ */
2054
2055#if defined(PLAT_ppc32_linux)
2056
2057/* This is useful for finding out about the on-stack stuff:
2058
2059 extern int f9 ( int,int,int,int,int,int,int,int,int );
2060 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2061 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2062 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2063
2064 int g9 ( void ) {
2065 return f9(11,22,33,44,55,66,77,88,99);
2066 }
2067 int g10 ( void ) {
2068 return f10(11,22,33,44,55,66,77,88,99,110);
2069 }
2070 int g11 ( void ) {
2071 return f11(11,22,33,44,55,66,77,88,99,110,121);
2072 }
2073 int g12 ( void ) {
2074 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2075 }
2076*/
2077
2078/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2079
2080/* These regs are trashed by the hidden call. */
/* Clobber list for the hidden call on ppc32-linux: link/count/fixed-point
   exception registers, all condition-register fields, and the volatile
   GPRs named below. */
2081#define __CALLER_SAVED_REGS \
2082 "lr", "ctr", "xer", \
2083 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2084 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2085 "r11", "r12", "r13"
2086
2087/* Macros to save and align the stack before making a function
2088 call and restore it afterwards as gcc may not keep the stack
2089 pointer aligned if it doesn't realise calls are being made
2090 to other functions. */
2091
/* Save the stack pointer in r28, then clear its low 4 bits
   (rlwinm mask 0..27), i.e. round r1 down to a 16-byte boundary.
   r28 is listed as a clobber in every CALL_FN_ macro that uses this. */
2092#define VALGRIND_ALIGN_STACK \
2093 "mr 28,1\n\t" \
2094 "rlwinm 1,1,0,0,27\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the saved stack pointer from r28. */
2095#define VALGRIND_RESTORE_STACK \
2096 "mr 1,28\n\t"
2097
2098/* These CALL_FN_ macros assume that on ppc32-linux,
2099 sizeof(unsigned long) == 4. */
2100
/* ppc32: call a zero-argument function through _orig.nraddr with Valgrind
   redirection suppressed.  Throughout this family: the target address is
   loaded from _argvec[0] into r11, args 1-8 go in r3..r10, the result is
   returned in r3, and r28 (used by VALGRIND_ALIGN_STACK) is clobbered. */
2101#define CALL_FN_W_v(lval, orig) \
2102 do { \
2103 volatile OrigFn _orig = (orig); \
2104 volatile unsigned long _argvec[1]; \
2105 volatile unsigned long _res; \
2106 _argvec[0] = (unsigned long)_orig.nraddr; \
2107 __asm__ volatile( \
2108 VALGRIND_ALIGN_STACK \
2109 "mr 11,%1\n\t" \
2110 "lwz 11,0(11)\n\t" /* target->r11 */ \
2111 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2112 VALGRIND_RESTORE_STACK \
2113 "mr %0,3" \
2114 : /*out*/ "=r" (_res) \
2115 : /*in*/ "r" (&_argvec[0]) \
2116 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2117 ); \
2118 lval = (__typeof__(lval)) _res; \
2119 } while (0)
2120
/* One word-sized argument, passed in r3. */
2121#define CALL_FN_W_W(lval, orig, arg1) \
2122 do { \
2123 volatile OrigFn _orig = (orig); \
2124 volatile unsigned long _argvec[2]; \
2125 volatile unsigned long _res; \
2126 _argvec[0] = (unsigned long)_orig.nraddr; \
2127 _argvec[1] = (unsigned long)arg1; \
2128 __asm__ volatile( \
2129 VALGRIND_ALIGN_STACK \
2130 "mr 11,%1\n\t" \
2131 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2132 "lwz 11,0(11)\n\t" /* target->r11 */ \
2133 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2134 VALGRIND_RESTORE_STACK \
2135 "mr %0,3" \
2136 : /*out*/ "=r" (_res) \
2137 : /*in*/ "r" (&_argvec[0]) \
2138 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2139 ); \
2140 lval = (__typeof__(lval)) _res; \
2141 } while (0)
2142
/* Two arguments, in r3-r4. */
2143#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2144 do { \
2145 volatile OrigFn _orig = (orig); \
2146 volatile unsigned long _argvec[3]; \
2147 volatile unsigned long _res; \
2148 _argvec[0] = (unsigned long)_orig.nraddr; \
2149 _argvec[1] = (unsigned long)arg1; \
2150 _argvec[2] = (unsigned long)arg2; \
2151 __asm__ volatile( \
2152 VALGRIND_ALIGN_STACK \
2153 "mr 11,%1\n\t" \
2154 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2155 "lwz 4,8(11)\n\t" \
2156 "lwz 11,0(11)\n\t" /* target->r11 */ \
2157 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2158 VALGRIND_RESTORE_STACK \
2159 "mr %0,3" \
2160 : /*out*/ "=r" (_res) \
2161 : /*in*/ "r" (&_argvec[0]) \
2162 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2163 ); \
2164 lval = (__typeof__(lval)) _res; \
2165 } while (0)
2166
/* Three arguments, in r3-r5. */
2167#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2168 do { \
2169 volatile OrigFn _orig = (orig); \
2170 volatile unsigned long _argvec[4]; \
2171 volatile unsigned long _res; \
2172 _argvec[0] = (unsigned long)_orig.nraddr; \
2173 _argvec[1] = (unsigned long)arg1; \
2174 _argvec[2] = (unsigned long)arg2; \
2175 _argvec[3] = (unsigned long)arg3; \
2176 __asm__ volatile( \
2177 VALGRIND_ALIGN_STACK \
2178 "mr 11,%1\n\t" \
2179 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2180 "lwz 4,8(11)\n\t" \
2181 "lwz 5,12(11)\n\t" \
2182 "lwz 11,0(11)\n\t" /* target->r11 */ \
2183 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2184 VALGRIND_RESTORE_STACK \
2185 "mr %0,3" \
2186 : /*out*/ "=r" (_res) \
2187 : /*in*/ "r" (&_argvec[0]) \
2188 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2189 ); \
2190 lval = (__typeof__(lval)) _res; \
2191 } while (0)
2192
/* Four arguments, in r3-r6. */
2193#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2194 do { \
2195 volatile OrigFn _orig = (orig); \
2196 volatile unsigned long _argvec[5]; \
2197 volatile unsigned long _res; \
2198 _argvec[0] = (unsigned long)_orig.nraddr; \
2199 _argvec[1] = (unsigned long)arg1; \
2200 _argvec[2] = (unsigned long)arg2; \
2201 _argvec[3] = (unsigned long)arg3; \
2202 _argvec[4] = (unsigned long)arg4; \
2203 __asm__ volatile( \
2204 VALGRIND_ALIGN_STACK \
2205 "mr 11,%1\n\t" \
2206 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2207 "lwz 4,8(11)\n\t" \
2208 "lwz 5,12(11)\n\t" \
2209 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2210 "lwz 11,0(11)\n\t" /* target->r11 */ \
2211 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2212 VALGRIND_RESTORE_STACK \
2213 "mr %0,3" \
2214 : /*out*/ "=r" (_res) \
2215 : /*in*/ "r" (&_argvec[0]) \
2216 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2217 ); \
2218 lval = (__typeof__(lval)) _res; \
2219 } while (0)
2220
/* Five arguments, in r3-r7. */
2221#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2222 do { \
2223 volatile OrigFn _orig = (orig); \
2224 volatile unsigned long _argvec[6]; \
2225 volatile unsigned long _res; \
2226 _argvec[0] = (unsigned long)_orig.nraddr; \
2227 _argvec[1] = (unsigned long)arg1; \
2228 _argvec[2] = (unsigned long)arg2; \
2229 _argvec[3] = (unsigned long)arg3; \
2230 _argvec[4] = (unsigned long)arg4; \
2231 _argvec[5] = (unsigned long)arg5; \
2232 __asm__ volatile( \
2233 VALGRIND_ALIGN_STACK \
2234 "mr 11,%1\n\t" \
2235 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2236 "lwz 4,8(11)\n\t" \
2237 "lwz 5,12(11)\n\t" \
2238 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2239 "lwz 7,20(11)\n\t" \
2240 "lwz 11,0(11)\n\t" /* target->r11 */ \
2241 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2242 VALGRIND_RESTORE_STACK \
2243 "mr %0,3" \
2244 : /*out*/ "=r" (_res) \
2245 : /*in*/ "r" (&_argvec[0]) \
2246 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2247 ); \
2248 lval = (__typeof__(lval)) _res; \
2249 } while (0)
2250
/* Six arguments, in r3-r8. */
2251#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2252 do { \
2253 volatile OrigFn _orig = (orig); \
2254 volatile unsigned long _argvec[7]; \
2255 volatile unsigned long _res; \
2256 _argvec[0] = (unsigned long)_orig.nraddr; \
2257 _argvec[1] = (unsigned long)arg1; \
2258 _argvec[2] = (unsigned long)arg2; \
2259 _argvec[3] = (unsigned long)arg3; \
2260 _argvec[4] = (unsigned long)arg4; \
2261 _argvec[5] = (unsigned long)arg5; \
2262 _argvec[6] = (unsigned long)arg6; \
2263 __asm__ volatile( \
2264 VALGRIND_ALIGN_STACK \
2265 "mr 11,%1\n\t" \
2266 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2267 "lwz 4,8(11)\n\t" \
2268 "lwz 5,12(11)\n\t" \
2269 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2270 "lwz 7,20(11)\n\t" \
2271 "lwz 8,24(11)\n\t" \
2272 "lwz 11,0(11)\n\t" /* target->r11 */ \
2273 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2274 VALGRIND_RESTORE_STACK \
2275 "mr %0,3" \
2276 : /*out*/ "=r" (_res) \
2277 : /*in*/ "r" (&_argvec[0]) \
2278 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2279 ); \
2280 lval = (__typeof__(lval)) _res; \
2281 } while (0)
2282
/* Seven arguments, in r3-r9. */
2283#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2284 arg7) \
2285 do { \
2286 volatile OrigFn _orig = (orig); \
2287 volatile unsigned long _argvec[8]; \
2288 volatile unsigned long _res; \
2289 _argvec[0] = (unsigned long)_orig.nraddr; \
2290 _argvec[1] = (unsigned long)arg1; \
2291 _argvec[2] = (unsigned long)arg2; \
2292 _argvec[3] = (unsigned long)arg3; \
2293 _argvec[4] = (unsigned long)arg4; \
2294 _argvec[5] = (unsigned long)arg5; \
2295 _argvec[6] = (unsigned long)arg6; \
2296 _argvec[7] = (unsigned long)arg7; \
2297 __asm__ volatile( \
2298 VALGRIND_ALIGN_STACK \
2299 "mr 11,%1\n\t" \
2300 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2301 "lwz 4,8(11)\n\t" \
2302 "lwz 5,12(11)\n\t" \
2303 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2304 "lwz 7,20(11)\n\t" \
2305 "lwz 8,24(11)\n\t" \
2306 "lwz 9,28(11)\n\t" \
2307 "lwz 11,0(11)\n\t" /* target->r11 */ \
2308 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2309 VALGRIND_RESTORE_STACK \
2310 "mr %0,3" \
2311 : /*out*/ "=r" (_res) \
2312 : /*in*/ "r" (&_argvec[0]) \
2313 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2314 ); \
2315 lval = (__typeof__(lval)) _res; \
2316 } while (0)
2317
/* Eight arguments, in r3-r10 -- the last register-passed argument. */
2318#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2319 arg7,arg8) \
2320 do { \
2321 volatile OrigFn _orig = (orig); \
2322 volatile unsigned long _argvec[9]; \
2323 volatile unsigned long _res; \
2324 _argvec[0] = (unsigned long)_orig.nraddr; \
2325 _argvec[1] = (unsigned long)arg1; \
2326 _argvec[2] = (unsigned long)arg2; \
2327 _argvec[3] = (unsigned long)arg3; \
2328 _argvec[4] = (unsigned long)arg4; \
2329 _argvec[5] = (unsigned long)arg5; \
2330 _argvec[6] = (unsigned long)arg6; \
2331 _argvec[7] = (unsigned long)arg7; \
2332 _argvec[8] = (unsigned long)arg8; \
2333 __asm__ volatile( \
2334 VALGRIND_ALIGN_STACK \
2335 "mr 11,%1\n\t" \
2336 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2337 "lwz 4,8(11)\n\t" \
2338 "lwz 5,12(11)\n\t" \
2339 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2340 "lwz 7,20(11)\n\t" \
2341 "lwz 8,24(11)\n\t" \
2342 "lwz 9,28(11)\n\t" \
2343 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2344 "lwz 11,0(11)\n\t" /* target->r11 */ \
2345 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2346 VALGRIND_RESTORE_STACK \
2347 "mr %0,3" \
2348 : /*out*/ "=r" (_res) \
2349 : /*in*/ "r" (&_argvec[0]) \
2350 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2351 ); \
2352 lval = (__typeof__(lval)) _res; \
2353 } while (0)
2354
/* Nine arguments: args 1-8 in registers, arg9 stored at 8(r1) after
   dropping the (already aligned) stack by 16 bytes; r3 is used as a
   scratch register for the store before being loaded with arg1. */
2355#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2356 arg7,arg8,arg9) \
2357 do { \
2358 volatile OrigFn _orig = (orig); \
2359 volatile unsigned long _argvec[10]; \
2360 volatile unsigned long _res; \
2361 _argvec[0] = (unsigned long)_orig.nraddr; \
2362 _argvec[1] = (unsigned long)arg1; \
2363 _argvec[2] = (unsigned long)arg2; \
2364 _argvec[3] = (unsigned long)arg3; \
2365 _argvec[4] = (unsigned long)arg4; \
2366 _argvec[5] = (unsigned long)arg5; \
2367 _argvec[6] = (unsigned long)arg6; \
2368 _argvec[7] = (unsigned long)arg7; \
2369 _argvec[8] = (unsigned long)arg8; \
2370 _argvec[9] = (unsigned long)arg9; \
2371 __asm__ volatile( \
2372 VALGRIND_ALIGN_STACK \
2373 "mr 11,%1\n\t" \
2374 "addi 1,1,-16\n\t" \
2375 /* arg9 */ \
2376 "lwz 3,36(11)\n\t" \
2377 "stw 3,8(1)\n\t" \
2378 /* args1-8 */ \
2379 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2380 "lwz 4,8(11)\n\t" \
2381 "lwz 5,12(11)\n\t" \
2382 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2383 "lwz 7,20(11)\n\t" \
2384 "lwz 8,24(11)\n\t" \
2385 "lwz 9,28(11)\n\t" \
2386 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2387 "lwz 11,0(11)\n\t" /* target->r11 */ \
2388 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2389 VALGRIND_RESTORE_STACK \
2390 "mr %0,3" \
2391 : /*out*/ "=r" (_res) \
2392 : /*in*/ "r" (&_argvec[0]) \
2393 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2394 ); \
2395 lval = (__typeof__(lval)) _res; \
2396 } while (0)
2397
/* Ten arguments: args 9-10 stored at 8(r1)/12(r1). */
2398#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2399 arg7,arg8,arg9,arg10) \
2400 do { \
2401 volatile OrigFn _orig = (orig); \
2402 volatile unsigned long _argvec[11]; \
2403 volatile unsigned long _res; \
2404 _argvec[0] = (unsigned long)_orig.nraddr; \
2405 _argvec[1] = (unsigned long)arg1; \
2406 _argvec[2] = (unsigned long)arg2; \
2407 _argvec[3] = (unsigned long)arg3; \
2408 _argvec[4] = (unsigned long)arg4; \
2409 _argvec[5] = (unsigned long)arg5; \
2410 _argvec[6] = (unsigned long)arg6; \
2411 _argvec[7] = (unsigned long)arg7; \
2412 _argvec[8] = (unsigned long)arg8; \
2413 _argvec[9] = (unsigned long)arg9; \
2414 _argvec[10] = (unsigned long)arg10; \
2415 __asm__ volatile( \
2416 VALGRIND_ALIGN_STACK \
2417 "mr 11,%1\n\t" \
2418 "addi 1,1,-16\n\t" \
2419 /* arg10 */ \
2420 "lwz 3,40(11)\n\t" \
2421 "stw 3,12(1)\n\t" \
2422 /* arg9 */ \
2423 "lwz 3,36(11)\n\t" \
2424 "stw 3,8(1)\n\t" \
2425 /* args1-8 */ \
2426 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2427 "lwz 4,8(11)\n\t" \
2428 "lwz 5,12(11)\n\t" \
2429 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2430 "lwz 7,20(11)\n\t" \
2431 "lwz 8,24(11)\n\t" \
2432 "lwz 9,28(11)\n\t" \
2433 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2434 "lwz 11,0(11)\n\t" /* target->r11 */ \
2435 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2436 VALGRIND_RESTORE_STACK \
2437 "mr %0,3" \
2438 : /*out*/ "=r" (_res) \
2439 : /*in*/ "r" (&_argvec[0]) \
2440 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2441 ); \
2442 lval = (__typeof__(lval)) _res; \
2443 } while (0)
2444
/* Eleven arguments: stack frame grows by 32 bytes; args 9-11 stored at
   8(r1)..16(r1). */
2445#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2446 arg7,arg8,arg9,arg10,arg11) \
2447 do { \
2448 volatile OrigFn _orig = (orig); \
2449 volatile unsigned long _argvec[12]; \
2450 volatile unsigned long _res; \
2451 _argvec[0] = (unsigned long)_orig.nraddr; \
2452 _argvec[1] = (unsigned long)arg1; \
2453 _argvec[2] = (unsigned long)arg2; \
2454 _argvec[3] = (unsigned long)arg3; \
2455 _argvec[4] = (unsigned long)arg4; \
2456 _argvec[5] = (unsigned long)arg5; \
2457 _argvec[6] = (unsigned long)arg6; \
2458 _argvec[7] = (unsigned long)arg7; \
2459 _argvec[8] = (unsigned long)arg8; \
2460 _argvec[9] = (unsigned long)arg9; \
2461 _argvec[10] = (unsigned long)arg10; \
2462 _argvec[11] = (unsigned long)arg11; \
2463 __asm__ volatile( \
2464 VALGRIND_ALIGN_STACK \
2465 "mr 11,%1\n\t" \
2466 "addi 1,1,-32\n\t" \
2467 /* arg11 */ \
2468 "lwz 3,44(11)\n\t" \
2469 "stw 3,16(1)\n\t" \
2470 /* arg10 */ \
2471 "lwz 3,40(11)\n\t" \
2472 "stw 3,12(1)\n\t" \
2473 /* arg9 */ \
2474 "lwz 3,36(11)\n\t" \
2475 "stw 3,8(1)\n\t" \
2476 /* args1-8 */ \
2477 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2478 "lwz 4,8(11)\n\t" \
2479 "lwz 5,12(11)\n\t" \
2480 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2481 "lwz 7,20(11)\n\t" \
2482 "lwz 8,24(11)\n\t" \
2483 "lwz 9,28(11)\n\t" \
2484 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2485 "lwz 11,0(11)\n\t" /* target->r11 */ \
2486 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2487 VALGRIND_RESTORE_STACK \
2488 "mr %0,3" \
2489 : /*out*/ "=r" (_res) \
2490 : /*in*/ "r" (&_argvec[0]) \
2491 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2492 ); \
2493 lval = (__typeof__(lval)) _res; \
2494 } while (0)
2495
/* Twelve arguments: args 9-12 stored at 8(r1)..20(r1). */
2496#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2497 arg7,arg8,arg9,arg10,arg11,arg12) \
2498 do { \
2499 volatile OrigFn _orig = (orig); \
2500 volatile unsigned long _argvec[13]; \
2501 volatile unsigned long _res; \
2502 _argvec[0] = (unsigned long)_orig.nraddr; \
2503 _argvec[1] = (unsigned long)arg1; \
2504 _argvec[2] = (unsigned long)arg2; \
2505 _argvec[3] = (unsigned long)arg3; \
2506 _argvec[4] = (unsigned long)arg4; \
2507 _argvec[5] = (unsigned long)arg5; \
2508 _argvec[6] = (unsigned long)arg6; \
2509 _argvec[7] = (unsigned long)arg7; \
2510 _argvec[8] = (unsigned long)arg8; \
2511 _argvec[9] = (unsigned long)arg9; \
2512 _argvec[10] = (unsigned long)arg10; \
2513 _argvec[11] = (unsigned long)arg11; \
2514 _argvec[12] = (unsigned long)arg12; \
2515 __asm__ volatile( \
2516 VALGRIND_ALIGN_STACK \
2517 "mr 11,%1\n\t" \
2518 "addi 1,1,-32\n\t" \
2519 /* arg12 */ \
2520 "lwz 3,48(11)\n\t" \
2521 "stw 3,20(1)\n\t" \
2522 /* arg11 */ \
2523 "lwz 3,44(11)\n\t" \
2524 "stw 3,16(1)\n\t" \
2525 /* arg10 */ \
2526 "lwz 3,40(11)\n\t" \
2527 "stw 3,12(1)\n\t" \
2528 /* arg9 */ \
2529 "lwz 3,36(11)\n\t" \
2530 "stw 3,8(1)\n\t" \
2531 /* args1-8 */ \
2532 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2533 "lwz 4,8(11)\n\t" \
2534 "lwz 5,12(11)\n\t" \
2535 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2536 "lwz 7,20(11)\n\t" \
2537 "lwz 8,24(11)\n\t" \
2538 "lwz 9,28(11)\n\t" \
2539 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2540 "lwz 11,0(11)\n\t" /* target->r11 */ \
2541 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2542 VALGRIND_RESTORE_STACK \
2543 "mr %0,3" \
2544 : /*out*/ "=r" (_res) \
2545 : /*in*/ "r" (&_argvec[0]) \
2546 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2547 ); \
2548 lval = (__typeof__(lval)) _res; \
2549 } while (0)
2550
2551#endif /* PLAT_ppc32_linux */
2552
2553/* ------------------------ ppc64-linux ------------------------ */
2554
2555#if defined(PLAT_ppc64_linux)
2556
2557/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2558
2559/* These regs are trashed by the hidden call. */
/* Clobber list for the hidden call on ppc64-linux; same register set as
   the ppc32 variant above. */
2560#define __CALLER_SAVED_REGS \
2561 "lr", "ctr", "xer", \
2562 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2563 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2564 "r11", "r12", "r13"
2565
2566/* Macros to save and align the stack before making a function
2567 call and restore it afterwards as gcc may not keep the stack
2568 pointer aligned if it doesn't realise calls are being made
2569 to other functions. */
2570
/* Save the stack pointer in r28, then clear its low 4 bits
   (rldicr keeps bits 0..59 of the 64-bit r1), i.e. round r1 down
   to a 16-byte boundary. */
2571#define VALGRIND_ALIGN_STACK \
2572 "mr 28,1\n\t" \
2573 "rldicr 1,1,0,59\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the saved stack pointer from r28. */
2574#define VALGRIND_RESTORE_STACK \
2575 "mr 1,28\n\t"
2576
2577/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2578 long) == 8. */
2579
/* ppc64: call a zero-argument function with Valgrind redirection
   suppressed.  Layout differs from ppc32: _argvec[1] holds the caller's
   TOC pointer (_orig.r2) and _argvec[2] the target address; the asm is
   handed &_argvec[2], so the target is at 0(11), the callee's tocptr at
   -8(11), and the current r2 is stashed at -16(11) across the call and
   restored afterwards.  Args (in later variants) are at 8(11) upward and
   go in r3..r10; the result returns in r3.  r28 holds the saved stack
   pointer from VALGRIND_ALIGN_STACK. */
2580#define CALL_FN_W_v(lval, orig) \
2581 do { \
2582 volatile OrigFn _orig = (orig); \
2583 volatile unsigned long _argvec[3+0]; \
2584 volatile unsigned long _res; \
2585 /* _argvec[0] holds current r2 across the call */ \
2586 _argvec[1] = (unsigned long)_orig.r2; \
2587 _argvec[2] = (unsigned long)_orig.nraddr; \
2588 __asm__ volatile( \
2589 VALGRIND_ALIGN_STACK \
2590 "mr 11,%1\n\t" \
2591 "std 2,-16(11)\n\t" /* save tocptr */ \
2592 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2593 "ld 11, 0(11)\n\t" /* target->r11 */ \
2594 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2595 "mr 11,%1\n\t" \
2596 "mr %0,3\n\t" \
2597 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2598 VALGRIND_RESTORE_STACK \
2599 : /*out*/ "=r" (_res) \
2600 : /*in*/ "r" (&_argvec[2]) \
2601 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2602 ); \
2603 lval = (__typeof__(lval)) _res; \
2604 } while (0)
2605
/* One argument, in r3. */
2606#define CALL_FN_W_W(lval, orig, arg1) \
2607 do { \
2608 volatile OrigFn _orig = (orig); \
2609 volatile unsigned long _argvec[3+1]; \
2610 volatile unsigned long _res; \
2611 /* _argvec[0] holds current r2 across the call */ \
2612 _argvec[1] = (unsigned long)_orig.r2; \
2613 _argvec[2] = (unsigned long)_orig.nraddr; \
2614 _argvec[2+1] = (unsigned long)arg1; \
2615 __asm__ volatile( \
2616 VALGRIND_ALIGN_STACK \
2617 "mr 11,%1\n\t" \
2618 "std 2,-16(11)\n\t" /* save tocptr */ \
2619 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2620 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2621 "ld 11, 0(11)\n\t" /* target->r11 */ \
2622 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2623 "mr 11,%1\n\t" \
2624 "mr %0,3\n\t" \
2625 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2626 VALGRIND_RESTORE_STACK \
2627 : /*out*/ "=r" (_res) \
2628 : /*in*/ "r" (&_argvec[2]) \
2629 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2630 ); \
2631 lval = (__typeof__(lval)) _res; \
2632 } while (0)
2633
/* Two arguments, in r3-r4. */
2634#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2635 do { \
2636 volatile OrigFn _orig = (orig); \
2637 volatile unsigned long _argvec[3+2]; \
2638 volatile unsigned long _res; \
2639 /* _argvec[0] holds current r2 across the call */ \
2640 _argvec[1] = (unsigned long)_orig.r2; \
2641 _argvec[2] = (unsigned long)_orig.nraddr; \
2642 _argvec[2+1] = (unsigned long)arg1; \
2643 _argvec[2+2] = (unsigned long)arg2; \
2644 __asm__ volatile( \
2645 VALGRIND_ALIGN_STACK \
2646 "mr 11,%1\n\t" \
2647 "std 2,-16(11)\n\t" /* save tocptr */ \
2648 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2649 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2650 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2651 "ld 11, 0(11)\n\t" /* target->r11 */ \
2652 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2653 "mr 11,%1\n\t" \
2654 "mr %0,3\n\t" \
2655 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2656 VALGRIND_RESTORE_STACK \
2657 : /*out*/ "=r" (_res) \
2658 : /*in*/ "r" (&_argvec[2]) \
2659 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2660 ); \
2661 lval = (__typeof__(lval)) _res; \
2662 } while (0)
2663
/* Three arguments, in r3-r5. */
2664#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2665 do { \
2666 volatile OrigFn _orig = (orig); \
2667 volatile unsigned long _argvec[3+3]; \
2668 volatile unsigned long _res; \
2669 /* _argvec[0] holds current r2 across the call */ \
2670 _argvec[1] = (unsigned long)_orig.r2; \
2671 _argvec[2] = (unsigned long)_orig.nraddr; \
2672 _argvec[2+1] = (unsigned long)arg1; \
2673 _argvec[2+2] = (unsigned long)arg2; \
2674 _argvec[2+3] = (unsigned long)arg3; \
2675 __asm__ volatile( \
2676 VALGRIND_ALIGN_STACK \
2677 "mr 11,%1\n\t" \
2678 "std 2,-16(11)\n\t" /* save tocptr */ \
2679 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2680 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2681 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2682 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2683 "ld 11, 0(11)\n\t" /* target->r11 */ \
2684 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2685 "mr 11,%1\n\t" \
2686 "mr %0,3\n\t" \
2687 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2688 VALGRIND_RESTORE_STACK \
2689 : /*out*/ "=r" (_res) \
2690 : /*in*/ "r" (&_argvec[2]) \
2691 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2692 ); \
2693 lval = (__typeof__(lval)) _res; \
2694 } while (0)
2695
/* Four arguments, in r3-r6. */
2696#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2697 do { \
2698 volatile OrigFn _orig = (orig); \
2699 volatile unsigned long _argvec[3+4]; \
2700 volatile unsigned long _res; \
2701 /* _argvec[0] holds current r2 across the call */ \
2702 _argvec[1] = (unsigned long)_orig.r2; \
2703 _argvec[2] = (unsigned long)_orig.nraddr; \
2704 _argvec[2+1] = (unsigned long)arg1; \
2705 _argvec[2+2] = (unsigned long)arg2; \
2706 _argvec[2+3] = (unsigned long)arg3; \
2707 _argvec[2+4] = (unsigned long)arg4; \
2708 __asm__ volatile( \
2709 VALGRIND_ALIGN_STACK \
2710 "mr 11,%1\n\t" \
2711 "std 2,-16(11)\n\t" /* save tocptr */ \
2712 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2713 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2714 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2715 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2716 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2717 "ld 11, 0(11)\n\t" /* target->r11 */ \
2718 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2719 "mr 11,%1\n\t" \
2720 "mr %0,3\n\t" \
2721 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2722 VALGRIND_RESTORE_STACK \
2723 : /*out*/ "=r" (_res) \
2724 : /*in*/ "r" (&_argvec[2]) \
2725 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2726 ); \
2727 lval = (__typeof__(lval)) _res; \
2728 } while (0)
2729
/* Five arguments, in r3-r7. */
2730#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2731 do { \
2732 volatile OrigFn _orig = (orig); \
2733 volatile unsigned long _argvec[3+5]; \
2734 volatile unsigned long _res; \
2735 /* _argvec[0] holds current r2 across the call */ \
2736 _argvec[1] = (unsigned long)_orig.r2; \
2737 _argvec[2] = (unsigned long)_orig.nraddr; \
2738 _argvec[2+1] = (unsigned long)arg1; \
2739 _argvec[2+2] = (unsigned long)arg2; \
2740 _argvec[2+3] = (unsigned long)arg3; \
2741 _argvec[2+4] = (unsigned long)arg4; \
2742 _argvec[2+5] = (unsigned long)arg5; \
2743 __asm__ volatile( \
2744 VALGRIND_ALIGN_STACK \
2745 "mr 11,%1\n\t" \
2746 "std 2,-16(11)\n\t" /* save tocptr */ \
2747 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2748 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2749 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2750 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2751 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2752 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2753 "ld 11, 0(11)\n\t" /* target->r11 */ \
2754 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2755 "mr 11,%1\n\t" \
2756 "mr %0,3\n\t" \
2757 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2758 VALGRIND_RESTORE_STACK \
2759 : /*out*/ "=r" (_res) \
2760 : /*in*/ "r" (&_argvec[2]) \
2761 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2762 ); \
2763 lval = (__typeof__(lval)) _res; \
2764 } while (0)
2765
/* Six arguments, in r3-r8. */
2766#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2767 do { \
2768 volatile OrigFn _orig = (orig); \
2769 volatile unsigned long _argvec[3+6]; \
2770 volatile unsigned long _res; \
2771 /* _argvec[0] holds current r2 across the call */ \
2772 _argvec[1] = (unsigned long)_orig.r2; \
2773 _argvec[2] = (unsigned long)_orig.nraddr; \
2774 _argvec[2+1] = (unsigned long)arg1; \
2775 _argvec[2+2] = (unsigned long)arg2; \
2776 _argvec[2+3] = (unsigned long)arg3; \
2777 _argvec[2+4] = (unsigned long)arg4; \
2778 _argvec[2+5] = (unsigned long)arg5; \
2779 _argvec[2+6] = (unsigned long)arg6; \
2780 __asm__ volatile( \
2781 VALGRIND_ALIGN_STACK \
2782 "mr 11,%1\n\t" \
2783 "std 2,-16(11)\n\t" /* save tocptr */ \
2784 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2785 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2786 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2787 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2788 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2789 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2790 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2791 "ld 11, 0(11)\n\t" /* target->r11 */ \
2792 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2793 "mr 11,%1\n\t" \
2794 "mr %0,3\n\t" \
2795 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2796 VALGRIND_RESTORE_STACK \
2797 : /*out*/ "=r" (_res) \
2798 : /*in*/ "r" (&_argvec[2]) \
2799 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2800 ); \
2801 lval = (__typeof__(lval)) _res; \
2802 } while (0)
2803
2804#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2805 arg7) \
2806 do { \
2807 volatile OrigFn _orig = (orig); \
2808 volatile unsigned long _argvec[3+7]; \
2809 volatile unsigned long _res; \
2810 /* _argvec[0] holds current r2 across the call */ \
2811 _argvec[1] = (unsigned long)_orig.r2; \
2812 _argvec[2] = (unsigned long)_orig.nraddr; \
2813 _argvec[2+1] = (unsigned long)arg1; \
2814 _argvec[2+2] = (unsigned long)arg2; \
2815 _argvec[2+3] = (unsigned long)arg3; \
2816 _argvec[2+4] = (unsigned long)arg4; \
2817 _argvec[2+5] = (unsigned long)arg5; \
2818 _argvec[2+6] = (unsigned long)arg6; \
2819 _argvec[2+7] = (unsigned long)arg7; \
2820 __asm__ volatile( \
2821 VALGRIND_ALIGN_STACK \
2822 "mr 11,%1\n\t" \
2823 "std 2,-16(11)\n\t" /* save tocptr */ \
2824 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2825 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2826 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2827 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2828 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2829 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2830 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2831 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2832 "ld 11, 0(11)\n\t" /* target->r11 */ \
2833 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2834 "mr 11,%1\n\t" \
2835 "mr %0,3\n\t" \
2836 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2837 VALGRIND_RESTORE_STACK \
2838 : /*out*/ "=r" (_res) \
2839 : /*in*/ "r" (&_argvec[2]) \
2840 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2841 ); \
2842 lval = (__typeof__(lval)) _res; \
2843 } while (0)
2844
2845#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2846 arg7,arg8) \
2847 do { \
2848 volatile OrigFn _orig = (orig); \
2849 volatile unsigned long _argvec[3+8]; \
2850 volatile unsigned long _res; \
2851 /* _argvec[0] holds current r2 across the call */ \
2852 _argvec[1] = (unsigned long)_orig.r2; \
2853 _argvec[2] = (unsigned long)_orig.nraddr; \
2854 _argvec[2+1] = (unsigned long)arg1; \
2855 _argvec[2+2] = (unsigned long)arg2; \
2856 _argvec[2+3] = (unsigned long)arg3; \
2857 _argvec[2+4] = (unsigned long)arg4; \
2858 _argvec[2+5] = (unsigned long)arg5; \
2859 _argvec[2+6] = (unsigned long)arg6; \
2860 _argvec[2+7] = (unsigned long)arg7; \
2861 _argvec[2+8] = (unsigned long)arg8; \
2862 __asm__ volatile( \
2863 VALGRIND_ALIGN_STACK \
2864 "mr 11,%1\n\t" \
2865 "std 2,-16(11)\n\t" /* save tocptr */ \
2866 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2867 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2868 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2869 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2870 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2871 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2872 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2873 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2874 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2875 "ld 11, 0(11)\n\t" /* target->r11 */ \
2876 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2877 "mr 11,%1\n\t" \
2878 "mr %0,3\n\t" \
2879 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2880 VALGRIND_RESTORE_STACK \
2881 : /*out*/ "=r" (_res) \
2882 : /*in*/ "r" (&_argvec[2]) \
2883 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2884 ); \
2885 lval = (__typeof__(lval)) _res; \
2886 } while (0)
2887
/* 9-arg variant.  Args 1-8 go in r3..r10 as before; arg9 no longer
   fits in a register, so the stack frame is grown by 128 bytes and
   arg9 is copied (via r3 as scratch, before the register args are
   loaded) into the new frame at 112(r1) — presumably the callee's
   parameter save area slot per the ppc64 ELF ABI; TODO confirm
   against the ABI spec.  VALGRIND_RESTORE_STACK undoes the frame
   adjustment. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+9];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg9 */                                               \
         "ld  3,72(11)\n\t"                                       \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(11)\n\t" /* arg6->r8 */                      \
         "ld   9, 56(11)\n\t" /* arg7->r9 */                      \
         "ld  10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 10-arg variant: as CALL_FN_W_9W, additionally spilling arg10 to
   120(r1).  Spills are written highest-arg-first so r3 is free to
   serve as scratch before arg1 is loaded into it. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+10];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg10 */                                              \
         "ld  3,80(11)\n\t"                                       \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld  3,72(11)\n\t"                                       \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(11)\n\t" /* arg6->r8 */                      \
         "ld   9, 56(11)\n\t" /* arg7->r9 */                      \
         "ld  10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 11-arg variant: frame grows by 144 (not 128) bytes to make room
   for three spilled args; arg11->128(r1), arg10->120(r1),
   arg9->112(r1). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg11 */                                              \
         "ld  3,88(11)\n\t"                                       \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld  3,80(11)\n\t"                                       \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld  3,72(11)\n\t"                                       \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(11)\n\t" /* arg6->r8 */                      \
         "ld   9, 56(11)\n\t" /* arg7->r9 */                      \
         "ld  10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 12-arg variant: four stack-spilled args, arg12->136(r1) down to
   arg9->112(r1); frame grown by 144 bytes as in CALL_FN_W_11W. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+12];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      _argvec[2+12] = (unsigned long)arg12;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg12 */                                              \
         "ld  3,96(11)\n\t"                                       \
         "std 3,136(1)\n\t"                                       \
         /* arg11 */                                              \
         "ld  3,88(11)\n\t"                                       \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld  3,80(11)\n\t"                                       \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld  3,72(11)\n\t"                                       \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(11)\n\t" /* arg6->r8 */                      \
         "ld   9, 56(11)\n\t" /* arg7->r9 */                      \
         "ld  10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3107
3108#endif /* PLAT_ppc64_linux */
3109
3110/* ------------------------- arm-linux ------------------------- */
3111
3112#if defined(PLAT_arm_linux)
3113
/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4","r14"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* This is a bit tricky.  We store the original stack pointer in r10
   as it is callee-saves.  gcc doesn't allow the use of r11 for some
   reason.  Also, we can't directly "bic" the stack pointer in thumb
   mode since r13 isn't an allowed register number in that context.
   So use r4 as a temporary, since that is about to get trashed
   anyway, just after each use of this macro.  Side effect is we need
   to be very careful about any future changes, since
   VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
/* Saves sp in r10, then rounds sp down to an 8-byte boundary
   (clears the low 3 bits via r4 as scratch). */
#define VALGRIND_ALIGN_STACK \
      "mov r10, sp\n\t" \
      "mov r4,  sp\n\t" \
      "bic r4,  r4, #7\n\t" \
      "mov sp,  r4\n\t"
/* Restores the pre-alignment sp saved in r10. */
#define VALGRIND_RESTORE_STACK \
      "mov sp,  r10\n\t"
3137
3138/* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3139 long) == 4. */
3140
/* Call the original (non-redirected) zero-arg function at
   _orig.nraddr and assign its word result to lval.  The target
   address is loaded from _argvec[0] into r4 (which the
   branch-and-link sequence expects); the result comes back in r0
   per the ARM AAPCS. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 1-arg variant: arg1 is passed in r0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 2-arg variant: args in r0, r1. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 3-arg variant: args in r0..r2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 4-arg variant: args in r0..r3, exhausting the ARM argument
   registers; 5+ args use the stack (see below). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3255
/* 5-arg variant: args 1-4 in r0..r3, arg5 pushed on the stack.
   The extra "sub sp, sp, #4" keeps sp 8-byte aligned (the push is
   an odd number of 4-byte words and VALGRIND_ALIGN_STACK has just
   aligned sp to 8). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 6-arg variant: arg5/arg6 pushed as a pair (even word count, so no
   alignment pad is needed). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 7-arg variant: arg5..arg7 pushed (odd count, so sub #4 pad). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 8-arg variant: arg5..arg8 pushed as four words (even count). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "push {r0, r1, r2, r3} \n\t"                             \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3391
/* 9-arg variant: arg5..arg9 pushed as five words (odd count, hence
   the sub #4 alignment pad), r4 doubling as scratch for arg9 before
   it finally receives the target address. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 10-arg variant: arg10 pushed first (deepest), then arg5..arg9;
   six stacked words total, so no alignment pad is needed. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 11-arg variant: arg10/arg11 pushed deepest, then arg5..arg9;
   seven stacked words (odd), so sub #4 pads for 8-byte alignment. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 12-arg variant: arg10..arg12 pushed deepest, then arg5..arg9;
   eight stacked words (even), so no pad. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11,arg12)                    \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "ldr r2, [%1, #48] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3566
3567#endif /* PLAT_arm_linux */
3568
3569/* ------------------------ arm64-linux ------------------------ */
3570
3571#if defined(PLAT_arm64_linux)
3572
/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS \
     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9",   \
     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17",      \
     "x18", "x19", "x20", "x30",                                  \
     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",  \
     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17",      \
     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25",      \
     "v26", "v27", "v28", "v29", "v30", "v31"

/* x21 is callee-saved, so we can use it to save and restore SP around
   the hidden call. */
/* Saves sp in x21, then rounds sp down to a 16-byte boundary
   (clears the low 4 bits). */
#define VALGRIND_ALIGN_STACK \
      "mov x21, sp\n\t" \
      "bic sp, x21, #15\n\t"
/* Restores the pre-alignment sp saved in x21. */
#define VALGRIND_RESTORE_STACK \
      "mov sp,  x21\n\t"
3590
3591/* These CALL_FN_ macros assume that on arm64-linux,
3592 sizeof(unsigned long) == 8. */
3593
3594#define CALL_FN_W_v(lval, orig) \
3595 do { \
3596 volatile OrigFn _orig = (orig); \
3597 volatile unsigned long _argvec[1]; \
3598 volatile unsigned long _res; \
3599 _argvec[0] = (unsigned long)_orig.nraddr; \
3600 __asm__ volatile( \
3601 VALGRIND_ALIGN_STACK \
3602 "ldr x8, [%1] \n\t" /* target->x8 */ \
3603 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3604 VALGRIND_RESTORE_STACK \
3605 "mov %0, x0\n" \
3606 : /*out*/ "=r" (_res) \
3607 : /*in*/ "0" (&_argvec[0]) \
3608 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3609 ); \
3610 lval = (__typeof__(lval)) _res; \
3611 } while (0)
3612
3613#define CALL_FN_W_W(lval, orig, arg1) \
3614 do { \
3615 volatile OrigFn _orig = (orig); \
3616 volatile unsigned long _argvec[2]; \
3617 volatile unsigned long _res; \
3618 _argvec[0] = (unsigned long)_orig.nraddr; \
3619 _argvec[1] = (unsigned long)(arg1); \
3620 __asm__ volatile( \
3621 VALGRIND_ALIGN_STACK \
3622 "ldr x0, [%1, #8] \n\t" \
3623 "ldr x8, [%1] \n\t" /* target->x8 */ \
3624 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3625 VALGRIND_RESTORE_STACK \
3626 "mov %0, x0\n" \
3627 : /*out*/ "=r" (_res) \
3628 : /*in*/ "0" (&_argvec[0]) \
3629 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3630 ); \
3631 lval = (__typeof__(lval)) _res; \
3632 } while (0)
3633
3634#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3635 do { \
3636 volatile OrigFn _orig = (orig); \
3637 volatile unsigned long _argvec[3]; \
3638 volatile unsigned long _res; \
3639 _argvec[0] = (unsigned long)_orig.nraddr; \
3640 _argvec[1] = (unsigned long)(arg1); \
3641 _argvec[2] = (unsigned long)(arg2); \
3642 __asm__ volatile( \
3643 VALGRIND_ALIGN_STACK \
3644 "ldr x0, [%1, #8] \n\t" \
3645 "ldr x1, [%1, #16] \n\t" \
3646 "ldr x8, [%1] \n\t" /* target->x8 */ \
3647 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3648 VALGRIND_RESTORE_STACK \
3649 "mov %0, x0\n" \
3650 : /*out*/ "=r" (_res) \
3651 : /*in*/ "0" (&_argvec[0]) \
3652 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3653 ); \
3654 lval = (__typeof__(lval)) _res; \
3655 } while (0)
3656
/* Call a 3-argument word function: args 1-3 -> x0-x2, target -> x8. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3681
/* Call a 4-argument word function: args 1-4 -> x0-x3, target -> x8. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3708
/* Call a 5-argument word function: args 1-5 -> x0-x4, target -> x8. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3737
/* Call a 6-argument word function: args 1-6 -> x0-x5, target -> x8. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3768
/* Call a 7-argument word function: args 1-7 -> x0-x6, target -> x8. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3802
/* Call an 8-argument word function: args 1-8 fill all of x0-x7
   (the last register-passed slots in AAPCS64); target -> x8. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3838
/* Call a 9-argument word function: args 1-8 -> x0-x7; arg9 is spilled
   to the stack (sp dropped by 0x20 to keep 16-byte alignment, then
   restored by VALGRIND_RESTORE_STACK).  x8 is used as scratch for the
   spill before receiving the target address. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3878
/* Call a 10-argument word function: args 1-8 -> x0-x7; args 9-10 go
   on the stack at sp+0 and sp+8 (0x20 bytes reserved, 16-byte
   aligned); x8 is scratch for the spills, then holds the target. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"                                 \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3921
/* Call an 11-argument word function: args 1-8 -> x0-x7; args 9-11 go
   on the stack at sp+0/+8/+16 (0x30 bytes reserved to stay 16-byte
   aligned). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x30 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"                                 \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1, #88] \n\t"                                 \
         "str x8, [sp, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3967
/* Call a 12-argument word function: args 1-8 -> x0-x7; args 9-12 go
   on the stack at sp+0/+8/+16/+24 (0x30 bytes reserved, 16-byte
   aligned). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,                 \
                      arg12)                                      \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x30 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"                                 \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1, #88] \n\t"                                 \
         "str x8, [sp, #16] \n\t"                                 \
         "ldr x8, [%1, #96] \n\t"                                 \
         "str x8, [sp, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4017
4018#endif /* PLAT_arm64_linux */
4019
4020/* ------------------------- s390x-linux ------------------------- */
4021
4022#if defined(PLAT_s390x_linux)
4023
4024/* Similar workaround as amd64 (see above), but we use r11 as frame
4025 pointer and save the old r11 in r7. r11 might be used for
4026 argvec, therefore we copy argvec in r1 since r1 is clobbered
4027 after the call anyway. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
/* Extra input operand: the CFA, used below as the r11 frame pointer. */
# define __FRAME_POINTER                                          \
      ,"d"(__builtin_dwarf_cfa())
/* Copy argvec ptr into r1, save old r11 in r7, point r11 at the CFA
   and emit CFI so unwinding through the hidden call still works. */
# define VALGRIND_CFI_PROLOGUE                                    \
      ".cfi_remember_state\n\t"                                   \
      "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */          \
      "lgr 7,11\n\t"                                              \
      "lgr 11,%2\n\t"                                             \
      ".cfi_def_cfa r11, 0\n\t"
/* Undo the prologue: restore r11 from r7 and the saved CFI state. */
# define VALGRIND_CFI_EPILOGUE                                    \
      "lgr 11, 7\n\t"                                             \
      ".cfi_restore_state\n\t"
#else
/* No CFI asm support: skip the frame-pointer bookkeeping entirely. */
# define __FRAME_POINTER
# define VALGRIND_CFI_PROLOGUE                                    \
      "lgr 1,%1\n\t"
# define VALGRIND_CFI_EPILOGUE
#endif
4046
4047/* Nb: On s390 the stack pointer is properly aligned *at all times*
4048 according to the s390 GCC maintainer. (The ABI specification is not
4049 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4050 VALGRIND_RESTORE_STACK are not defined here. */
4051
4052/* These regs are trashed by the hidden call. Note that we overwrite
4053 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4054 function a proper return address. All others are ABI defined call
4055 clobbers. */
/* GPRs r0-r5 plus r14 (return address) and FPRs f0-f7 are clobbered. */
#define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
                           "f0","f1","f2","f3","f4","f5","f6","f7"
4058
4059/* Nb: Although r11 is modified in the asm snippets below (inside
4060 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
4061 two reasons:
4062 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4063 modified
4064 (2) GCC will complain that r11 cannot appear inside a clobber section,
4065 when compiled with -O -fno-omit-frame-pointer
4066 */
4067
/* Call a 0-argument word function.  A 160-byte frame (the s390x ELF
   ABI register save area) is allocated on r15, the target address is
   loaded into r1 for the no-redirect call, and the r2 result is
   copied into lval. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-160\n\t"                                       \
         "lg 1, 0(1)\n\t"  /* target->r1 */                       \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,160\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=d" (_res)                                    \
         : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4088
4089/* The call abi has the arguments in r2-r6 and stack */
/* Call a 1-argument word function: arg1 -> r2; result read from r2. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-160\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,160\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=d" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4112
/* Call a 2-argument word function: args 1-2 -> r2-r3. */
#define CALL_FN_W_WW(lval, orig, arg1, arg2)                      \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-160\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,160\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=d" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4137
/* Call a 3-argument word function: args 1-3 -> r2-r4. */
#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3)               \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-160\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,160\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=d" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4164
/* Call a 4-argument word function: args 1-4 -> r2-r5. */
#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4)        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-160\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,160\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=d" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4193
/* Call a 5-argument word function: args 1-5 -> r2-r6 (the full
   register-argument set); r6 is additionally listed as trashed since
   it is ABI callee-saved but used here as an argument register. */
#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5)    \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-160\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,160\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=d" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7"  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4224
/* Call a 6-argument word function: args 1-5 -> r2-r6; arg6 is copied
   (mvc) into the stack parameter slot at 160(r15), so the frame is
   grown to 168 bytes. */
#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5,    \
                     arg6)                                        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-168\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,168\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=d" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7"  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4258
/* Call a 7-argument word function: args 1-5 -> r2-r6; args 6-7 are
   copied to stack slots 160/168(r15); frame grown to 176 bytes. */
#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5,    \
                     arg6, arg7)                                  \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-176\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "mvc 168(8,15), 56(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,176\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=d" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7"  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4294
/* Call an 8-argument word function: args 1-5 -> r2-r6; args 6-8 are
   copied to stack slots 160/168/176(r15); frame grown to 184 bytes. */
#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5,    \
                     arg6, arg7 ,arg8)                            \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-184\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "mvc 168(8,15), 56(1)\n\t"                               \
         "mvc 176(8,15), 64(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,184\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=d" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7"  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4332
/* Call a 9-argument word function: args 1-5 -> r2-r6; args 6-9 are
   copied to stack slots 160..184(r15); frame grown to 192 bytes. */
#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5,    \
                     arg6, arg7 ,arg8, arg9)                      \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-192\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "mvc 168(8,15), 56(1)\n\t"                               \
         "mvc 176(8,15), 64(1)\n\t"                               \
         "mvc 184(8,15), 72(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,192\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=d" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7"  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4372
/* Call a 10-argument word function: args 1-5 -> r2-r6; args 6-10 are
   copied to stack slots 160..192(r15); frame grown to 200 bytes. */
#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                      arg6, arg7 ,arg8, arg9, arg10)              \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-200\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "mvc 168(8,15), 56(1)\n\t"                               \
         "mvc 176(8,15), 64(1)\n\t"                               \
         "mvc 184(8,15), 72(1)\n\t"                               \
         "mvc 192(8,15), 80(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,200\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=d" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7"  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4414
/* Call an 11-argument word function: args 1-5 -> r2-r6; args 6-11 are
   copied to stack slots 160..200(r15); frame grown to 208 bytes. */
#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                      arg6, arg7 ,arg8, arg9, arg10, arg11)       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-208\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "mvc 168(8,15), 56(1)\n\t"                               \
         "mvc 176(8,15), 64(1)\n\t"                               \
         "mvc 184(8,15), 72(1)\n\t"                               \
         "mvc 192(8,15), 80(1)\n\t"                               \
         "mvc 200(8,15), 88(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,208\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=d" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7"  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4458
/* Call a 12-argument word function: args 1-5 -> r2-r6; args 6-12 are
   copied to stack slots 160..208(r15); frame grown to 216 bytes. */
#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                      arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      _argvec[12] = (unsigned long)arg12;                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-216\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "mvc 168(8,15), 56(1)\n\t"                               \
         "mvc 176(8,15), 64(1)\n\t"                               \
         "mvc 184(8,15), 72(1)\n\t"                               \
         "mvc 192(8,15), 80(1)\n\t"                               \
         "mvc 200(8,15), 88(1)\n\t"                               \
         "mvc 208(8,15), 96(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,216\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=d" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7"  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4504
4505
4506#endif /* PLAT_s390x_linux */
4507
4508/* ------------------------- mips32-linux ----------------------- */
4509
4510#if defined(PLAT_mips32_linux)
4511
4512/* These regs are trashed by the hidden call. */
/* v0-v1 ($2-$3), a0-a3 ($4-$7), t0-t7 ($8-$15), t8-t9 ($24-$25)
   and ra ($31) are call-clobbered. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",       \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
4516
4517/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
4518 long) == 4. */
4519
/* Call a 0-argument word function (mips32 o32).  $28 (gp) and $31
   (ra) are saved/restored around the call; 16 bytes of outgoing
   argument space are reserved; the target goes in $25 (t9) as the
   o32 PIC convention requires; the $2 (v0) result is copied out. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16\n\t"                                  \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4544
/* Call a 1-argument word function: arg1 -> $4 (a0); target -> $25. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"   /* arg1*/                          \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4571
/* Call a 2-argument word function: args 1-2 -> $4-$5 (a0-a1). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4600
/* Call a 3-argument word function: args 1-3 -> $4-$6 (a0-a2). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4631
/* Call _orig.nraddr with 4 word-sized args in $4..$7 via the
   non-redirected t9 ($25) path; word result from $2 into lval.
   gp ($28) and ra ($31) are saved/restored around the call. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4664
/* Call _orig.nraddr with 5 word-sized args: args 1-4 in $4..$7, arg5
   copied to the outgoing stack area at 16($29); word result from $2
   into lval.  gp ($28) and ra ($31) are saved/restored around the
   call.  $4 is used as a scratch register to stage the stack arg
   before being reloaded with arg1. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 24\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 24 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call _orig.nraddr with 6 word-sized args: args 1-4 in $4..$7, args
   5-6 copied to the outgoing stack area at 16/20($29); word result
   from $2 into lval.  gp ($28) and ra ($31) are saved/restored.
   NOTE(review): the lone "nop" between the arg6 load and its store
   looks like a load-delay-slot filler; the sibling macros omit it --
   harmless either way. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 32\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "nop\n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 32 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4739
/* Call _orig.nraddr with 7 word-sized args: args 1-4 in $4..$7, args
   5-7 copied to the outgoing stack area at 16/20/24($29); word result
   from $2 into lval.  gp ($28) and ra ($31) are saved/restored. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 32\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 32 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4782
/* Call _orig.nraddr with 8 word-sized args: args 1-4 in $4..$7, args
   5-8 copied to the outgoing stack area at 16..28($29); word result
   from $2 into lval.  gp ($28) and ra ($31) are saved/restored. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 40\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 40 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4828
/* Call _orig.nraddr with 9 word-sized args: args 1-4 in $4..$7, args
   5-9 copied to the outgoing stack area at 16..32($29); word result
   from $2 into lval.  gp ($28) and ra ($31) are saved/restored. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 40\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" \
         "lw $4, 36(%1) \n\t" \
         "sw $4, 32($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 40 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4877
/* Call _orig.nraddr with 10 word-sized args: args 1-4 in $4..$7, args
   5-10 copied to the outgoing stack area at 16..36($29); word result
   from $2 into lval.  gp ($28) and ra ($31) are saved/restored. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 48\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" \
         "lw $4, 36(%1) \n\t" \
         "sw $4, 32($29) \n\t" \
         "lw $4, 40(%1) \n\t" \
         "sw $4, 36($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 48 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4929
/* Call _orig.nraddr with 11 word-sized args: args 1-4 in $4..$7, args
   5-11 copied to the outgoing stack area at 16..40($29); word result
   from $2 into lval.  gp ($28) and ra ($31) are saved/restored. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 48\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" \
         "lw $4, 36(%1) \n\t" \
         "sw $4, 32($29) \n\t" \
         "lw $4, 40(%1) \n\t" \
         "sw $4, 36($29) \n\t" \
         "lw $4, 44(%1) \n\t" \
         "sw $4, 40($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 48 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4985
/* Call _orig.nraddr with 12 word-sized args: args 1-4 in $4..$7, args
   5-12 copied to the outgoing stack area at 16..44($29); word result
   from $2 into lval.  gp ($28) and ra ($31) are saved/restored.
   NOTE(review): the input constraint here is "r" whereas every sibling
   mips32 macro uses the matching constraint "0"; appears harmless
   since %1 is only read before %0 is written -- confirm upstream. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 56\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" \
         "lw $4, 36(%1) \n\t" \
         "sw $4, 32($29) \n\t" \
         "lw $4, 40(%1) \n\t" \
         "sw $4, 36($29) \n\t" \
         "lw $4, 44(%1) \n\t" \
         "sw $4, 40($29) \n\t" \
         "lw $4, 48(%1) \n\t" \
         "sw $4, 44($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 56 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5044
5045#endif /* PLAT_mips32_linux */
5046
5047/* ------------------------- mips64-linux ------------------------- */
5048
5049#if defined(PLAT_mips64_linux)
5050
/* These regs are trashed by the hidden call. */
/* List is passed to the asm clobber list of every CALL_FN_ macro
   below; it includes $2 (used for the return value), the argument
   registers $4..$11, and $31 (ra). */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
5055
/* These CALL_FN_ macros assume that on mips64-linux, sizeof(unsigned
   long) == 8.  (The loads/stores below use ld/sd and 8-byte argvec
   strides accordingly.) */
5058
/* Call _orig.nraddr with no args via the non-redirected t9 ($25)
   path; word result from $2 into lval.  NOTE(review): uses the
   matching constraint "0" while the other mips64 macros use "r";
   equivalent here since %1 is only read before %0 is written. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5075
/* Call _orig.nraddr with 1 doubleword arg in $4 via the
   non-redirected t9 ($25) path; result from $2 into lval. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" /* arg1*/ \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5094
/* Call _orig.nraddr with 2 doubleword args in $4/$5 via the
   non-redirected t9 ($25) path; result from $2 into lval. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5115
/* Call _orig.nraddr with 3 doubleword args in $4/$5/$6 via the
   non-redirected t9 ($25) path; result from $2 into lval. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5138
/* Call _orig.nraddr with 4 doubleword args in $4..$7 via the
   non-redirected t9 ($25) path; result from $2 into lval. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5163
/* Call _orig.nraddr with 5 doubleword args in $4..$8 via the
   non-redirected t9 ($25) path; result from $2 into lval. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5190
/* Call _orig.nraddr with 6 doubleword args in $4..$9 via the
   non-redirected t9 ($25) path; result from $2 into lval. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5219
/* Call _orig.nraddr with 7 doubleword args in $4..$10 via the
   non-redirected t9 ($25) path; result from $2 into lval. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5251
/* Call _orig.nraddr with 8 doubleword args in $4..$11 via the
   non-redirected t9 ($25) path; result from $2 into lval. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5285
/* Call _orig.nraddr with 9 doubleword args: args 1-8 in $4..$11, arg9
   stored to a freshly reserved 8-byte stack slot at 0($29); result
   from $2 into lval.  $4 stages the stack arg before being reloaded
   with arg1. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         "dsubu $29, $29, 8\n\t" \
         "ld $4, 72(%1)\n\t" \
         "sd $4, 0($29)\n\t" \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "daddu $29, $29, 8\n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5324
/* Call _orig.nraddr with 10 doubleword args: args 1-8 in $4..$11,
   args 9-10 stored to a 16-byte stack area at 0/8($29); result from
   $2 into lval. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         "dsubu $29, $29, 16\n\t" \
         "ld $4, 72(%1)\n\t" \
         "sd $4, 0($29)\n\t" \
         "ld $4, 80(%1)\n\t" \
         "sd $4, 8($29)\n\t" \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "daddu $29, $29, 16\n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5366
/* Call _orig.nraddr with 11 doubleword args: args 1-8 in $4..$11,
   args 9-11 stored to a 24-byte stack area at 0/8/16($29); result
   from $2 into lval. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         "dsubu $29, $29, 24\n\t" \
         "ld $4, 72(%1)\n\t" \
         "sd $4, 0($29)\n\t" \
         "ld $4, 80(%1)\n\t" \
         "sd $4, 8($29)\n\t" \
         "ld $4, 88(%1)\n\t" \
         "sd $4, 16($29)\n\t" \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "daddu $29, $29, 24\n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5412
/* Call _orig.nraddr with 12 doubleword args: args 1-8 in $4..$11,
   args 9-12 stored to a 32-byte stack area at 0..24($29); result
   from $2 into lval. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         "dsubu $29, $29, 32\n\t" \
         "ld $4, 72(%1)\n\t" \
         "sd $4, 0($29)\n\t" \
         "ld $4, 80(%1)\n\t" \
         "sd $4, 8($29)\n\t" \
         "ld $4, 88(%1)\n\t" \
         "sd $4, 16($29)\n\t" \
         "ld $4, 96(%1)\n\t" \
         "sd $4, 24($29)\n\t" \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "daddu $29, $29, 32\n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5461
5462#endif /* PLAT_mips64_linux */
5463
5464
5465/* ------------------------------------------------------------------ */
5466/* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
5467/* */
5468/* ------------------------------------------------------------------ */
5469
5470/* Some request codes. There are many more of these, but most are not
5471 exposed to end-user view. These are the public ones, all of the
5472 form 0x1000 + small_number.
5473
5474 Core ones are in the range 0x00000000--0x0000ffff. The non-public
5475 ones start at 0x2000.
5476*/
5477
/* These macros are used by tools -- they must be public, but don't
   embed them into other programs.  VG_USERREQ_TOOL_BASE packs a
   two-character tool code into the top 16 bits of a request number;
   VG_IS_TOOL_USERREQ tests whether request 'v' belongs to tool
   (a,b) by comparing those top 16 bits. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)((((a)&0xff) << 24) | (((b)&0xff) << 16)))
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
5484
5485/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
5486 This enum comprises an ABI exported by Valgrind to programs
5487 which use client requests. DO NOT CHANGE THE ORDER OF THESE
5488 ENTRIES, NOR DELETE ANY -- add new ones at the end. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a monitor
             command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
          /* 0x130b is out of numeric order here; these values are ABI,
             so it must keep the value it was first assigned. */
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK   = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL   = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL  = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC    = 0x1305,
          VG_USERREQ__MEMPOOL_FREE     = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM     = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL     = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE   = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS   = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901
   } Vg_ClientRequest;
5561
/* Non-GNU compilers: neutralise the GNU __extension__ keyword used in
   the request macros so the rest of this header still compiles. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif
5565
5566
/* Returns the number of Valgrinds this code is running under.  That
   is, 0 if running natively, 1 if running under Valgrind, 2 if
   running under Valgrind which is running under another Valgrind,
   etc.
   The first argument to the request (0, "if not") is the value the
   expression yields when not running under Valgrind at all.
   NOTE(review): the definition ends with a line continuation, so the
   blank line that follows is (harmlessly) part of the macro body;
   keep that line blank. */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)                    \

5576
/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
   _qzz_len - 1].  Useful if you are debugging a JITter or some such,
   since it provides a way to make sure valgrind will retranslate the
   invalidated area.  Returns no value.
   _qzz_addr: start of the invalidated code range; _qzz_len: its length
   in bytes. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS,  \
                                    _qzz_addr, _qzz_len, 0, 0, 0)
5584
5585
/* These requests are for getting Valgrind itself to print something.
   Possibly with a backtrace.  This is a really ugly hack.  The return value
   is the number of characters printed, excluding the "**<pid>** " part at the
   start and the backtrace (if present). */

/* NB: in the preprocessor '&&' binds tighter than '||', so this condition
   reads as: __GNUC__ || (__INTEL_COMPILER && !_MSC_VER).  I.e. declare the
   GCC attributes for GCC-compatible compilers, and for ICC except when ICC
   is fronting for MSVC (where __attribute__ is unavailable). */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
/* Modern GCC will optimize the static routine out if unused,
   and unused attribute will shut down warnings about it. */
static int VALGRIND_PRINTF(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif

/* Print a printf-style message into the Valgrind log.  Returns the number
   of characters printed; returns 0 when compiled out with NVALGRIND, or
   when the request is not handled (the 0 default below).  The va_list is
   passed by reference (VG_USERREQ__PRINTF_VALIST_BY_REF) because a va_list
   may be larger than a machine word -- see the enum comment above. */
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF(const char *format, ...)
{
#if defined(NVALGRIND)
   return 0;
#else /* NVALGRIND */
   /* On MSVC/MinGW64 (LLP64) 'unsigned long' is only 32 bits, so a
      pointer-sized uintptr_t is used for the result and casts instead. */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}
5630
/* NB: as above, this reads as __GNUC__ || (__INTEL_COMPILER && !_MSC_VER)
   because '&&' binds tighter than '||'. */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif

/* Like VALGRIND_PRINTF, but Valgrind also emits a stack backtrace after
   the message.  Returns the number of characters printed (excluding the
   backtrace); 0 under NVALGRIND or when the request is unhandled. */
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
{
#if defined(NVALGRIND)
   return 0;
#else /* NVALGRIND */
   /* See VALGRIND_PRINTF: uintptr_t on LLP64 Windows targets, where
      'unsigned long' is narrower than a pointer. */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}
5668
5669
5670/* These requests allow control to move from the simulated CPU to the
   real CPU, calling an arbitrary function.
5672
5673 Note that the current ThreadId is inserted as the first argument.
5674 So this call:
5675
5676 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
5677
5678 requires f to have this signature:
5679
5680 Word f(Word tid, Word arg1, Word arg2)
5681
5682 where "Word" is a word-sized type.
5683
5684 Note that these client requests are not entirely reliable. For example,
5685 if you call a function with them that subsequently calls printf(),
5686 there's a high chance Valgrind will crash. Generally, your prospects of
5687 these working are made higher if the called function does not refer to
5688 any global variables, and does not refer to any libc or other functions
5689 (printf et al). Any kind of entanglement with libc or dynamic linking is
5690 likely to have a bad outcome, for tricky reasons which we've grappled
5691 with a lot in the past.
5692*/
/* Call _qyy_fn(tid) on the real CPU (tid is inserted by Valgrind). */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

/* Call _qyy_fn(tid, _qyy_arg1) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)

/* Call _qyy_fn(tid, _qyy_arg1, _qyy_arg2) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

/* Call _qyy_fn(tid, _qyy_arg1, _qyy_arg2, _qyy_arg3) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,        \
                                    VG_USERREQ__CLIENT_CALL3,      \
                                    _qyy_fn,                       \
                                    _qyy_arg1, _qyy_arg2,          \
                                    _qyy_arg3, 0)
5717
5718
/* Counts the number of errors that have been recorded by a tool.  Nb:
   the tool must record the errors with VG_(maybe_record_error)() or
   VG_(unique_error)() for them to be counted.
   Yields 0 (the default return below) when not running under Valgrind. */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
5727
5728/* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
5729 when heap blocks are allocated in order to give accurate results. This
5730 happens automatically for the standard allocator functions such as
5731 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
5732 delete[], etc.
5733
5734 But if your program uses a custom allocator, this doesn't automatically
5735 happen, and Valgrind will not do as well. For example, if you allocate
   superblocks with mmap() and then allocate chunks of the superblocks, all
5737 Valgrind's observations will be at the mmap() level and it won't know that
5738 the chunks should be considered separate entities. In Memcheck's case,
5739 that means you probably won't get heap block overrun detection (because
5740 there won't be redzones marked as unaddressable) and you definitely won't
5741 get any leak detection.
5742
5743 The following client requests allow a custom allocator to be annotated so
5744 that it can be handled accurately by Valgrind.
5745
5746 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
5747 by a malloc()-like function. For Memcheck (an illustrative case), this
5748 does two things:
5749
5750 - It records that the block has been allocated. This means any addresses
5751 within the block mentioned in error messages will be
5752 identified as belonging to the block. It also means that if the block
5753 isn't freed it will be detected by the leak checker.
5754
5755 - It marks the block as being addressable and undefined (if 'is_zeroed' is
5756 not set), or addressable and defined (if 'is_zeroed' is set). This
5757 controls how accesses to the block by the program are handled.
5758
5759 'addr' is the start of the usable block (ie. after any
5760 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
5761 can apply redzones -- these are blocks of padding at the start and end of
5762 each block. Adding redzones is recommended as it makes it much more likely
5763 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
5764 zeroed (or filled with another predictable value), as is the case for
5765 calloc().
5766
5767 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
5768 heap block -- that will be used by the client program -- is allocated.
5769 It's best to put it at the outermost level of the allocator if possible;
5770 for example, if you have a function my_alloc() which calls
5771 internal_alloc(), and the client request is put inside internal_alloc(),
5772 stack traces relating to the heap block will contain entries for both
5773 my_alloc() and internal_alloc(), which is probably not what you want.
5774
5775 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
5776 custom blocks from within a heap block, B, that has been allocated with
5777 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
5778 -- the custom blocks will take precedence.
5779
5780 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
5781 Memcheck, it does two things:
5782
5783 - It records that the block has been deallocated. This assumes that the
5784 block was annotated as having been allocated via
5785 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
5786
5787 - It marks the block as being unaddressable.
5788
5789 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
5790 heap block is deallocated.
5791
5792 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
5793 Memcheck, it does four things:
5794
5795 - It records that the size of a block has been changed. This assumes that
5796 the block was annotated as having been allocated via
5797 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
5798
5799 - If the block shrunk, it marks the freed memory as being unaddressable.
5800
5801 - If the block grew, it marks the new area as undefined and defines a red
5802 zone past the end of the new block.
5803
5804 - The V-bits of the overlap between the old and the new block are preserved.
5805
5806 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
5807 and before deallocation of the old block.
5808
5809 In many cases, these three client requests will not be enough to get your
5810 allocator working well with Memcheck. More specifically, if your allocator
5811 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
5812 will be necessary to mark the memory as addressable just before the zeroing
5813 occurs, otherwise you'll get a lot of invalid write errors. For example,
5814 you'll need to do this if your allocator recycles freed blocks, but it
5815 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
5816 Alternatively, if your allocator reuses freed blocks for allocator-internal
5817 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
5818
5819 Really, what's happening is a blurring of the lines between the client
5820 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
5821 memory should be considered unaddressable to the client program, but the
5822 allocator knows more than the rest of the client program and so may be able
5823 to safely access it. Extra client requests are necessary for Valgrind to
5824 understand the distinction between the allocator and the rest of the
5825 program.
5826
5827 Ignored if addr == 0.
5828*/
/* Announce a malloc()-like allocation: the usable block is
   [addr, addr+sizeB), with redzone size rzB; is_zeroed says whether the
   memory starts out defined.  Full semantics in the long comment above;
   ignored if addr == 0. */
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)    \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
                                    addr, sizeB, rzB, is_zeroed, 0)
5832
/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   addr: block start; oldSizeB/newSizeB: usable sizes before and after the
   in-place resize; rzB: redzone size.
   Ignored if addr == 0.
*/
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
                                    addr, oldSizeB, newSizeB, rzB, 0)
5839
/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Announces that the block previously registered at addr (with redzone
   size rzB) has been deallocated.
   Ignored if addr == 0.
*/
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                        \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,   \
                                    addr, rzB, 0, 0, 0)
5846
/* Create a memory pool anchored at 'pool', with redzone size rzB;
   is_zeroed indicates whether chunks handed out start zero-filled
   (cf. the is_zeroed parameter of VALGRIND_MALLOCLIKE_BLOCK above). */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,   \
                                    pool, rzB, is_zeroed, 0, 0)

/* Destroy a memory pool. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                            \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,  \
                                    pool, 0, 0, 0, 0)

/* Associate a piece of memory [addr, addr+size) with a memory pool. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,    \
                                    pool, addr, size, 0, 0)

/* Disassociate the piece at addr from a memory pool. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,     \
                                    pool, addr, 0, 0, 0)

/* Disassociate any pieces outside the range [addr, addr+size). */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,     \
                                    pool, addr, size, 0, 0)

/* Tell the tool that the pool anchored at poolA has moved to poolB. */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                       \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,     \
                                    poolA, poolB, 0, 0, 0)

/* Resize and/or move a piece associated with a memory pool: the piece
   formerly at addrA is now at addrB with the given size. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,   \
                                    pool, addrA, addrB, size, 0)

/* Return 1 if a mempool exists, else 0 (also 0 when not under Valgrind,
   the default return below). */
#define VALGRIND_MEMPOOL_EXISTS(pool)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MEMPOOL_EXISTS,        \
                               pool, 0, 0, 0, 0)
5887
/* Mark a piece of memory, delimited by start and end, as being a stack.
   Returns a stack id for use with the two requests below.
   NOTE(review): whether 'end' is inclusive is not visible here -- see the
   Valgrind manual before relying on exact boundary semantics. */
#define VALGRIND_STACK_REGISTER(start, end)                       \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__STACK_REGISTER,        \
                               start, end, 0, 0, 0)

/* Unmark the piece of memory associated with a stack id as being a
   stack. */
#define VALGRIND_STACK_DEREGISTER(id)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
                                    id, 0, 0, 0, 0)

/* Change the start and end address of the stack id. */
#define VALGRIND_STACK_CHANGE(id, start, end)                     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,     \
                                    id, start, end, 0, 0)
5904
/* Load PDB debug info for Wine PE image_map.
   NOTE(review): fd is presumably an open descriptor for the .pdb file and
   ptr/total_size/delta describe the mapped image -- confirm against
   Valgrind's Wine support code before relying on this. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
                                    fd, ptr, total_size, delta, 0)

/* Map a code address to a source file name and line number.  buf64
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                    \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,      \
                               addr, buf64, 0, 0, 0)
5918
/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled.
   (Implemented as a +1 delta to the thread's error-disablement
   indicator; see VG_USERREQ__CHANGE_ERR_DISABLEMENT above.) */
#define VALGRIND_DISABLE_ERROR_REPORTING                          \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    1, 0, 0, 0, 0)

/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING.  (A -1 delta: only 1 and -1 are
   valid deltas, per the enum comment above.) */
#define VALGRIND_ENABLE_ERROR_REPORTING                           \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    -1, 0, 0, 0, 0)
5936
/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Returns 1 if command not recognised, 0 otherwise.
   NOTE(review): VG_USERREQ__GDB_MONITOR_COMMAND is not declared in the
   enum visible in this region; it is presumably defined earlier in this
   header. */
#define VALGRIND_MONITOR_COMMAND(command)                         \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
                                    command, 0, 0, 0, 0)
5945
5946
/* Clean up: remove the internal per-platform selector macros defined at
   the top of this header so they do not leak into client code. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64_linux
#undef PLAT_arm_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux

#endif /* __VALGRIND_H */