1 /* -*- c -*-
2 ----------------------------------------------------------------
3
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
9
10 ----------------------------------------------------------------
11
12 This file is part of Valgrind, a dynamic binary instrumentation
13 framework.
14
15 Copyright (C) 2000-2011 Julian Seward. All rights reserved.
16
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
19 are met:
20
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
23
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
28
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
31
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
34 permission.
35
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
47
48 ----------------------------------------------------------------
49
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
54
55 ----------------------------------------------------------------
56 */
57
58
59 /* This file is for inclusion into client (your!) code.
60
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
63
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66 unchanged. When not running on Valgrind, each client request
67 consumes very few (e.g. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
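
/* A minimal usage sketch. The request code 0x1234 below is a
   hypothetical placeholder; real request codes are defined by the
   individual tool headers (memcheck.h, helgrind.h, ...). The whole
   statement reduces to a no-op when NVALGRIND is defined.

      #include "valgrind.h"

      static void note_phase(unsigned long phase)
      {
         VALGRIND_DO_CLIENT_REQUEST_STMT(0x1234, phase, 0, 0, 0, 0);
      }
*/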
72
73 #ifndef __VALGRIND_H
74 #define __VALGRIND_H
75
76
77 /* ------------------------------------------------------------------ */
78 /* VERSION NUMBER OF VALGRIND */
79 /* ------------------------------------------------------------------ */
80
81 /* Specify Valgrind's version number, so that user code can
82 conditionally compile based on our version number. Note that these
83 were introduced at version 3.6 and so do not exist in version 3.5
84 or earlier. The recommended way to use them to check for "version
85 X.Y or later" is (e.g.)
86
87 #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
88 && (__VALGRIND_MAJOR__ > 3 \
89 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
90 */
91 #define __VALGRIND_MAJOR__ 3
92 #define __VALGRIND_MINOR__ 6
93
94
95 #include <stdarg.h>
96
97 /* Nb: this file might be included in a file compiled with -ansi. So
98 we can't use C++ style "//" comments nor the "asm" keyword (instead
99 use "__asm__"). */
100
101 /* Derive some tags indicating what the target platform is. Note
102 that in this file we're using the compiler's CPP symbols for
103 identifying architectures, which are different to the ones we use
104 within the rest of Valgrind. Note, __powerpc__ is active for both
105 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
106 latter (on Linux, that is).
107
108 Misc note: how to find out what's predefined in gcc by default:
109 gcc -Wp,-dM somefile.c
110 */
111 #undef PLAT_x86_darwin
112 #undef PLAT_amd64_darwin
113 #undef PLAT_x86_win32
114 #undef PLAT_x86_linux
115 #undef PLAT_amd64_linux
116 #undef PLAT_ppc32_linux
117 #undef PLAT_ppc64_linux
118 #undef PLAT_arm_linux
119 #undef PLAT_s390x_linux
120
121
122 #if defined(__APPLE__) && defined(__i386__)
123 # define PLAT_x86_darwin 1
124 #elif defined(__APPLE__) && defined(__x86_64__)
125 # define PLAT_amd64_darwin 1
126 #elif defined(__MINGW32__) || defined(__CYGWIN32__) \
127 || (defined(_WIN32) && defined(_M_IX86))
128 # define PLAT_x86_win32 1
129 #elif defined(__linux__) && defined(__i386__)
130 # define PLAT_x86_linux 1
131 #elif defined(__linux__) && defined(__x86_64__)
132 # define PLAT_amd64_linux 1
133 #elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
134 # define PLAT_ppc32_linux 1
135 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__)
136 # define PLAT_ppc64_linux 1
137 #elif defined(__linux__) && defined(__arm__)
138 # define PLAT_arm_linux 1
139 #elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
140 # define PLAT_s390x_linux 1
141 #else
142 /* If we're not compiling for our target platform, don't generate
143 any inline asms. */
144 # if !defined(NVALGRIND)
145 # define NVALGRIND 1
146 # endif
147 #endif
148
149
150 /* ------------------------------------------------------------------ */
151 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
152 /* in here of use to end-users -- skip to the next section. */
153 /* ------------------------------------------------------------------ */
154
155 /*
156 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
157 * request. Accepts both pointers and integers as arguments.
158 *
159 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
160 * client request that does not return a value.
161 *
162 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
163 * client request and whose value equals the client request result. Accepts
164 * both pointers and integers as arguments. Note that such calls are not
165 * necessarily pure functions -- they may have side effects.
166 */
167
168 #define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default, \
169 _zzq_request, _zzq_arg1, _zzq_arg2, \
170 _zzq_arg3, _zzq_arg4, _zzq_arg5) \
171 do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default), \
172 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
173 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
174
175 #define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1, \
176 _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
177 do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
178 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
179 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
180
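/* A minimal sketch of the _EXPR form, assuming a stand-in request code
   0x1001 that no real tool defines: when running natively the
   expression evaluates to the supplied default (here 0), and under
   Valgrind it evaluates to whatever the handling tool replies.

      unsigned long reply =
         VALGRIND_DO_CLIENT_REQUEST_EXPR(0, 0x1001, 0, 0, 0, 0, 0);
      if (reply != 0) {
         ... Valgrind (or the handling tool) answered the request ...
      }
*/
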
181 #if defined(NVALGRIND)
182
183 /* Define NVALGRIND to completely remove the Valgrind magic sequence
184 from the compiled code (analogous to NDEBUG's effects on
185 assert()) */
186 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
187 _zzq_default, _zzq_request, \
188 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
189 (_zzq_default)
190
191 #else /* ! NVALGRIND */
192
193 /* The following defines the magic code sequences which the JITter
194 spots and handles magically. Don't look too closely at them as
195 they will rot your brain.
196
197 The assembly code sequences for all architectures are in this one
198 file. This is because this file must be stand-alone, and we don't
199 want to have multiple files.
200
201 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
202 value gets put in the return slot, so that everything works when
203 this is executed not under Valgrind. Args are passed in a memory
204 block, and so there's no intrinsic limit to the number that could
205 be passed, but it's currently five.
206
207 The macro args are:
208 _zzq_rlval result lvalue
209 _zzq_default default value (result returned when running on real CPU)
210 _zzq_request request code
211 _zzq_arg1..5 request params
212
213 The other two macros are used to support function wrapping, and are
214 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
215 guest's NRADDR pseudo-register and whatever other information is
216 needed to safely call the original from the wrapper: on
217 ppc64-linux, the R2 value at the divert point is also needed. This
218 information is abstracted into a user-visible type, OrigFn.
219
220 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
221 guest, but guarantees that the branch instruction will not be
222 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
223 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
224 complete inline asm, since it needs to be combined with more magic
225 inline asm stuff to be useful.
226 */
227
228 /* ------------------------- x86-{linux,darwin} ---------------- */
229
230 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
231 || (defined(PLAT_x86_win32) && defined(__GNUC__))
232
233 typedef
234 struct {
235 unsigned int nraddr; /* where's the code? */
236 }
237 OrigFn;
238
239 #define __SPECIAL_INSTRUCTION_PREAMBLE \
240 "roll $3, %%edi ; roll $13, %%edi\n\t" \
241 "roll $29, %%edi ; roll $19, %%edi\n\t"
242
243 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
244 _zzq_default, _zzq_request, \
245 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
246 __extension__ \
247 ({volatile unsigned int _zzq_args[6]; \
248 volatile unsigned int _zzq_result; \
249 _zzq_args[0] = (unsigned int)(_zzq_request); \
250 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
251 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
252 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
253 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
254 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
255 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
256 /* %EDX = client_request ( %EAX ) */ \
257 "xchgl %%ebx,%%ebx" \
258 : "=d" (_zzq_result) \
259 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
260 : "cc", "memory" \
261 ); \
262 _zzq_result; \
263 })
264
265 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
266 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
267 volatile unsigned int __addr; \
268 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
269 /* %EAX = guest_NRADDR */ \
270 "xchgl %%ecx,%%ecx" \
271 : "=a" (__addr) \
272 : \
273 : "cc", "memory" \
274 ); \
275 _zzq_orig->nraddr = __addr; \
276 }
277
278 #define VALGRIND_CALL_NOREDIR_EAX \
279 __SPECIAL_INSTRUCTION_PREAMBLE \
280 /* call-noredir *%EAX */ \
281 "xchgl %%edx,%%edx\n\t"
282 #endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */
283
284 /* ------------------------- x86-Win32 ------------------------- */
285
286 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
287
288 typedef
289 struct {
290 unsigned int nraddr; /* where's the code? */
291 }
292 OrigFn;
293
294 #if defined(_MSC_VER)
295
296 #define __SPECIAL_INSTRUCTION_PREAMBLE \
297 __asm rol edi, 3 __asm rol edi, 13 \
298 __asm rol edi, 29 __asm rol edi, 19
299
300 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
301 _zzq_default, _zzq_request, \
302 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
303 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
304 (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
305 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
306 (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
307
308 static __inline uintptr_t
309 valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
310 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
311 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
312 uintptr_t _zzq_arg5)
313 {
314 volatile uintptr_t _zzq_args[6];
315 volatile unsigned int _zzq_result;
316 _zzq_args[0] = (uintptr_t)(_zzq_request);
317 _zzq_args[1] = (uintptr_t)(_zzq_arg1);
318 _zzq_args[2] = (uintptr_t)(_zzq_arg2);
319 _zzq_args[3] = (uintptr_t)(_zzq_arg3);
320 _zzq_args[4] = (uintptr_t)(_zzq_arg4);
321 _zzq_args[5] = (uintptr_t)(_zzq_arg5);
322 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
323 __SPECIAL_INSTRUCTION_PREAMBLE
324 /* %EDX = client_request ( %EAX ) */
325 __asm xchg ebx,ebx
326 __asm mov _zzq_result, edx
327 }
328 return _zzq_result;
329 }
330
331 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
332 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
333 volatile unsigned int __addr; \
334 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
335 /* %EAX = guest_NRADDR */ \
336 __asm xchg ecx,ecx \
337 __asm mov __addr, eax \
338 } \
339 _zzq_orig->nraddr = __addr; \
340 }
341
342 #define VALGRIND_CALL_NOREDIR_EAX ERROR
343
344 #else
345 #error Unsupported compiler.
346 #endif
347
348 #endif /* PLAT_x86_win32 */
349
350 /* ------------------------ amd64-{linux,darwin} --------------- */
351
352 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
353
354 typedef
355 struct {
356 unsigned long long int nraddr; /* where's the code? */
357 }
358 OrigFn;
359
360 #define __SPECIAL_INSTRUCTION_PREAMBLE \
361 "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
362 "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
363
364 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
365 _zzq_default, _zzq_request, \
366 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
367 __extension__ \
368 ({ volatile unsigned long long int _zzq_args[6]; \
369 volatile unsigned long long int _zzq_result; \
370 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
371 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
372 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
373 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
374 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
375 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
376 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
377 /* %RDX = client_request ( %RAX ) */ \
378 "xchgq %%rbx,%%rbx" \
379 : "=d" (_zzq_result) \
380 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
381 : "cc", "memory" \
382 ); \
383 _zzq_result; \
384 })
385
386 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
387 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
388 volatile unsigned long long int __addr; \
389 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
390 /* %RAX = guest_NRADDR */ \
391 "xchgq %%rcx,%%rcx" \
392 : "=a" (__addr) \
393 : \
394 : "cc", "memory" \
395 ); \
396 _zzq_orig->nraddr = __addr; \
397 }
398
399 #define VALGRIND_CALL_NOREDIR_RAX \
400 __SPECIAL_INSTRUCTION_PREAMBLE \
401 /* call-noredir *%RAX */ \
402 "xchgq %%rdx,%%rdx\n\t"
403 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
404
405 /* ------------------------ ppc32-linux ------------------------ */
406
407 #if defined(PLAT_ppc32_linux)
408
409 typedef
410 struct {
411 unsigned int nraddr; /* where's the code? */
412 }
413 OrigFn;
414
415 #define __SPECIAL_INSTRUCTION_PREAMBLE \
416 "rlwinm 0,0,3,0,0 ; rlwinm 0,0,13,0,0\n\t" \
417 "rlwinm 0,0,29,0,0 ; rlwinm 0,0,19,0,0\n\t"
418
419 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
420 _zzq_default, _zzq_request, \
421 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
422 \
423 __extension__ \
424 ({ unsigned int _zzq_args[6]; \
425 unsigned int _zzq_result; \
426 unsigned int* _zzq_ptr; \
427 _zzq_args[0] = (unsigned int)(_zzq_request); \
428 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
429 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
430 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
431 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
432 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
433 _zzq_ptr = _zzq_args; \
434 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
435 "mr 4,%2\n\t" /*ptr*/ \
436 __SPECIAL_INSTRUCTION_PREAMBLE \
437 /* %R3 = client_request ( %R4 ) */ \
438 "or 1,1,1\n\t" \
439 "mr %0,3" /*result*/ \
440 : "=b" (_zzq_result) \
441 : "b" (_zzq_default), "b" (_zzq_ptr) \
442 : "cc", "memory", "r3", "r4"); \
443 _zzq_result; \
444 })
445
446 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
447 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
448 unsigned int __addr; \
449 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
450 /* %R3 = guest_NRADDR */ \
451 "or 2,2,2\n\t" \
452 "mr %0,3" \
453 : "=b" (__addr) \
454 : \
455 : "cc", "memory", "r3" \
456 ); \
457 _zzq_orig->nraddr = __addr; \
458 }
459
460 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
461 __SPECIAL_INSTRUCTION_PREAMBLE \
462 /* branch-and-link-to-noredir *%R11 */ \
463 "or 3,3,3\n\t"
464 #endif /* PLAT_ppc32_linux */
465
466 /* ------------------------ ppc64-linux ------------------------ */
467
468 #if defined(PLAT_ppc64_linux)
469
470 typedef
471 struct {
472 unsigned long long int nraddr; /* where's the code? */
473 unsigned long long int r2; /* what tocptr do we need? */
474 }
475 OrigFn;
476
477 #define __SPECIAL_INSTRUCTION_PREAMBLE \
478 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
479 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
480
481 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
482 _zzq_default, _zzq_request, \
483 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
484 \
485 __extension__ \
486 ({ unsigned long long int _zzq_args[6]; \
487 unsigned long long int _zzq_result; \
488 unsigned long long int* _zzq_ptr; \
489 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
490 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
491 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
492 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
493 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
494 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
495 _zzq_ptr = _zzq_args; \
496 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
497 "mr 4,%2\n\t" /*ptr*/ \
498 __SPECIAL_INSTRUCTION_PREAMBLE \
499 /* %R3 = client_request ( %R4 ) */ \
500 "or 1,1,1\n\t" \
501 "mr %0,3" /*result*/ \
502 : "=b" (_zzq_result) \
503 : "b" (_zzq_default), "b" (_zzq_ptr) \
504 : "cc", "memory", "r3", "r4"); \
505 _zzq_result; \
506 })
507
508 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
509 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
510 unsigned long long int __addr; \
511 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
512 /* %R3 = guest_NRADDR */ \
513 "or 2,2,2\n\t" \
514 "mr %0,3" \
515 : "=b" (__addr) \
516 : \
517 : "cc", "memory", "r3" \
518 ); \
519 _zzq_orig->nraddr = __addr; \
520 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
521 /* %R3 = guest_NRADDR_GPR2 */ \
522 "or 4,4,4\n\t" \
523 "mr %0,3" \
524 : "=b" (__addr) \
525 : \
526 : "cc", "memory", "r3" \
527 ); \
528 _zzq_orig->r2 = __addr; \
529 }
530
531 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
532 __SPECIAL_INSTRUCTION_PREAMBLE \
533 /* branch-and-link-to-noredir *%R11 */ \
534 "or 3,3,3\n\t"
535
536 #endif /* PLAT_ppc64_linux */
537
538 /* ------------------------- arm-linux ------------------------- */
539
540 #if defined(PLAT_arm_linux)
541
542 typedef
543 struct {
544 unsigned int nraddr; /* where's the code? */
545 }
546 OrigFn;
547
548 #define __SPECIAL_INSTRUCTION_PREAMBLE \
549 "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
550 "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
551
552 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
553 _zzq_default, _zzq_request, \
554 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
555 \
556 __extension__ \
557 ({volatile unsigned int _zzq_args[6]; \
558 volatile unsigned int _zzq_result; \
559 _zzq_args[0] = (unsigned int)(_zzq_request); \
560 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
561 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
562 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
563 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
564 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
565 __asm__ volatile("mov r3, %1\n\t" /*default*/ \
566 "mov r4, %2\n\t" /*ptr*/ \
567 __SPECIAL_INSTRUCTION_PREAMBLE \
568 /* R3 = client_request ( R4 ) */ \
569 "orr r10, r10, r10\n\t" \
570 "mov %0, r3" /*result*/ \
571 : "=r" (_zzq_result) \
572 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
573 : "cc","memory", "r3", "r4"); \
574 _zzq_result; \
575 })
576
577 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
578 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
579 unsigned int __addr; \
580 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
581 /* R3 = guest_NRADDR */ \
582 "orr r11, r11, r11\n\t" \
583 "mov %0, r3" \
584 : "=r" (__addr) \
585 : \
586 : "cc", "memory", "r3" \
587 ); \
588 _zzq_orig->nraddr = __addr; \
589 }
590
591 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
592 __SPECIAL_INSTRUCTION_PREAMBLE \
593 /* branch-and-link-to-noredir *%R4 */ \
594 "orr r12, r12, r12\n\t"
595
596 #endif /* PLAT_arm_linux */
597
598 /* ------------------------ s390x-linux ------------------------ */
599
600 #if defined(PLAT_s390x_linux)
601
602 typedef
603 struct {
604 unsigned long long int nraddr; /* where's the code? */
605 }
606 OrigFn;
607
608 /* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind-specific
609 * code. This detection is implemented in the platform-specific toIR.c
610 * (e.g. VEX/priv/guest_s390_decoder.c).
611 */
612 #define __SPECIAL_INSTRUCTION_PREAMBLE \
613 "lr 15,15\n\t" \
614 "lr 1,1\n\t" \
615 "lr 2,2\n\t" \
616 "lr 3,3\n\t"
617
618 #define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
619 #define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
620 #define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
621
622 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
623 _zzq_default, _zzq_request, \
624 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
625 __extension__ \
626 ({volatile unsigned long long int _zzq_args[6]; \
627 volatile unsigned long long int _zzq_result; \
628 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
629 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
630 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
631 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
632 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
633 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
634 __asm__ volatile(/* r2 = args */ \
635 "lgr 2,%1\n\t" \
636 /* r3 = default */ \
637 "lgr 3,%2\n\t" \
638 __SPECIAL_INSTRUCTION_PREAMBLE \
639 __CLIENT_REQUEST_CODE \
640 /* results = r3 */ \
641 "lgr %0, 3\n\t" \
642 : "=d" (_zzq_result) \
643 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
644 : "cc", "2", "3", "memory" \
645 ); \
646 _zzq_result; \
647 })
648
649 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
650 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
651 volatile unsigned long long int __addr; \
652 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
653 __GET_NR_CONTEXT_CODE \
654 "lgr %0, 3\n\t" \
655 : "=a" (__addr) \
656 : \
657 : "cc", "3", "memory" \
658 ); \
659 _zzq_orig->nraddr = __addr; \
660 }
661
662 #define VALGRIND_CALL_NOREDIR_R1 \
663 __SPECIAL_INSTRUCTION_PREAMBLE \
664 __CALL_NO_REDIR_CODE
665
666 #endif /* PLAT_s390x_linux */
667
668 /* Insert assembly code for other platforms here... */
669
670 #endif /* NVALGRIND */
671
672
673 /* ------------------------------------------------------------------ */
674 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
675 /* ugly. It's the least-worst tradeoff I can think of. */
676 /* ------------------------------------------------------------------ */
677
678 /* This section defines magic (a.k.a. appalling-hack) macros for doing
679 guaranteed-no-redirection calls, so as to get from function
680 wrappers to the functions they are wrapping. The whole point is to
681 construct standard call sequences, but to do the call itself with a
682 special no-redirect call pseudo-instruction that the JIT
683 understands and handles specially. This section is long and
684 repetitious, and I can't see a way to make it shorter.
685
686 The naming scheme is as follows:
687
688 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
689
690 'W' stands for "word" and 'v' for "void". Hence there are
691 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
692 and for each, the possibility of returning a word-typed result, or
693 no result.
694 */
695
696 /* Use these to write the name of your wrapper. NOTE: duplicates
697 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
698 the default behaviour equivalence class tag "0000" into the name.
699 See pub_tool_redir.h for details -- normally you don't need to
700 think about this, though. */
701
702 /* Use an extra level of macroisation so as to ensure the soname/fnname
703 args are fully macro-expanded before pasting them together. */
704 #define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
705
706 #define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \
707 VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)
708
709 #define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \
710 VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
711
712 /* Use this macro from within a wrapper function to collect the
713 context (address and possibly other info) of the original function.
714 Once you have that you can then use it in one of the CALL_FN_
715 macros. The type of the argument _lval is OrigFn. */
716 #define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
717
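/* A sketch of a complete wrapper, with illustrative names: wrap a
   function "foo" taking one word-sized argument in an object with an
   empty soname (the special "NONE" tag), grab the OrigFn context and
   then call the real function via CALL_FN_W_W.

      int I_WRAP_SONAME_FNNAME_ZU(NONE, foo) ( int x )
      {
         int    result;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);
         CALL_FN_W_W(result, fn, x);
         return result;
      }
*/
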
718 /* Derivatives of the main macros below, for calling functions
719 returning void. */
720
721 #define CALL_FN_v_v(fnptr) \
722 do { volatile unsigned long _junk; \
723 CALL_FN_W_v(_junk,fnptr); } while (0)
724
725 #define CALL_FN_v_W(fnptr, arg1) \
726 do { volatile unsigned long _junk; \
727 CALL_FN_W_W(_junk,fnptr,arg1); } while (0)
728
729 #define CALL_FN_v_WW(fnptr, arg1,arg2) \
730 do { volatile unsigned long _junk; \
731 CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)
732
733 #define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3) \
734 do { volatile unsigned long _junk; \
735 CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)
736
737 #define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \
738 do { volatile unsigned long _junk; \
739 CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)
740
741 #define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \
742 do { volatile unsigned long _junk; \
743 CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)
744
745 #define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \
746 do { volatile unsigned long _junk; \
747 CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)
748
749 #define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \
750 do { volatile unsigned long _junk; \
751 CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
752
753 /* ------------------------- x86-{linux,darwin} ---------------- */
754
755 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
756
757 /* These regs are trashed by the hidden call. No need to mention eax
758 as gcc can already see that, and listing it causes gcc to bomb. */
759 #define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
760
761 /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
762 long) == 4. */
763
764 #define CALL_FN_W_v(lval, orig) \
765 do { \
766 volatile OrigFn _orig = (orig); \
767 volatile unsigned long _argvec[1]; \
768 volatile unsigned long _res; \
769 _argvec[0] = (unsigned long)_orig.nraddr; \
770 __asm__ volatile( \
771 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
772 VALGRIND_CALL_NOREDIR_EAX \
773 : /*out*/ "=a" (_res) \
774 : /*in*/ "a" (&_argvec[0]) \
775 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
776 ); \
777 lval = (__typeof__(lval)) _res; \
778 } while (0)
779
780 #define CALL_FN_W_W(lval, orig, arg1) \
781 do { \
782 volatile OrigFn _orig = (orig); \
783 volatile unsigned long _argvec[2]; \
784 volatile unsigned long _res; \
785 _argvec[0] = (unsigned long)_orig.nraddr; \
786 _argvec[1] = (unsigned long)(arg1); \
787 __asm__ volatile( \
788 "subl $12, %%esp\n\t" \
789 "pushl 4(%%eax)\n\t" \
790 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
791 VALGRIND_CALL_NOREDIR_EAX \
792 "addl $16, %%esp\n" \
793 : /*out*/ "=a" (_res) \
794 : /*in*/ "a" (&_argvec[0]) \
795 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
796 ); \
797 lval = (__typeof__(lval)) _res; \
798 } while (0)
799
800 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
801 do { \
802 volatile OrigFn _orig = (orig); \
803 volatile unsigned long _argvec[3]; \
804 volatile unsigned long _res; \
805 _argvec[0] = (unsigned long)_orig.nraddr; \
806 _argvec[1] = (unsigned long)(arg1); \
807 _argvec[2] = (unsigned long)(arg2); \
808 __asm__ volatile( \
809 "subl $8, %%esp\n\t" \
810 "pushl 8(%%eax)\n\t" \
811 "pushl 4(%%eax)\n\t" \
812 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
813 VALGRIND_CALL_NOREDIR_EAX \
814 "addl $16, %%esp\n" \
815 : /*out*/ "=a" (_res) \
816 : /*in*/ "a" (&_argvec[0]) \
817 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
818 ); \
819 lval = (__typeof__(lval)) _res; \
820 } while (0)
821
822 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
823 do { \
824 volatile OrigFn _orig = (orig); \
825 volatile unsigned long _argvec[4]; \
826 volatile unsigned long _res; \
827 _argvec[0] = (unsigned long)_orig.nraddr; \
828 _argvec[1] = (unsigned long)(arg1); \
829 _argvec[2] = (unsigned long)(arg2); \
830 _argvec[3] = (unsigned long)(arg3); \
831 __asm__ volatile( \
832 "subl $4, %%esp\n\t" \
833 "pushl 12(%%eax)\n\t" \
834 "pushl 8(%%eax)\n\t" \
835 "pushl 4(%%eax)\n\t" \
836 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
837 VALGRIND_CALL_NOREDIR_EAX \
838 "addl $16, %%esp\n" \
839 : /*out*/ "=a" (_res) \
840 : /*in*/ "a" (&_argvec[0]) \
841 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
842 ); \
843 lval = (__typeof__(lval)) _res; \
844 } while (0)
845
846 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
847 do { \
848 volatile OrigFn _orig = (orig); \
849 volatile unsigned long _argvec[5]; \
850 volatile unsigned long _res; \
851 _argvec[0] = (unsigned long)_orig.nraddr; \
852 _argvec[1] = (unsigned long)(arg1); \
853 _argvec[2] = (unsigned long)(arg2); \
854 _argvec[3] = (unsigned long)(arg3); \
855 _argvec[4] = (unsigned long)(arg4); \
856 __asm__ volatile( \
857 "pushl 16(%%eax)\n\t" \
858 "pushl 12(%%eax)\n\t" \
859 "pushl 8(%%eax)\n\t" \
860 "pushl 4(%%eax)\n\t" \
861 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
862 VALGRIND_CALL_NOREDIR_EAX \
863 "addl $16, %%esp\n" \
864 : /*out*/ "=a" (_res) \
865 : /*in*/ "a" (&_argvec[0]) \
866 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
867 ); \
868 lval = (__typeof__(lval)) _res; \
869 } while (0)
870
871 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
872 do { \
873 volatile OrigFn _orig = (orig); \
874 volatile unsigned long _argvec[6]; \
875 volatile unsigned long _res; \
876 _argvec[0] = (unsigned long)_orig.nraddr; \
877 _argvec[1] = (unsigned long)(arg1); \
878 _argvec[2] = (unsigned long)(arg2); \
879 _argvec[3] = (unsigned long)(arg3); \
880 _argvec[4] = (unsigned long)(arg4); \
881 _argvec[5] = (unsigned long)(arg5); \
882 __asm__ volatile( \
883 "subl $12, %%esp\n\t" \
884 "pushl 20(%%eax)\n\t" \
885 "pushl 16(%%eax)\n\t" \
886 "pushl 12(%%eax)\n\t" \
887 "pushl 8(%%eax)\n\t" \
888 "pushl 4(%%eax)\n\t" \
889 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
890 VALGRIND_CALL_NOREDIR_EAX \
891 "addl $32, %%esp\n" \
892 : /*out*/ "=a" (_res) \
893 : /*in*/ "a" (&_argvec[0]) \
894 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
895 ); \
896 lval = (__typeof__(lval)) _res; \
897 } while (0)
898
899 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
900 do { \
901 volatile OrigFn _orig = (orig); \
902 volatile unsigned long _argvec[7]; \
903 volatile unsigned long _res; \
904 _argvec[0] = (unsigned long)_orig.nraddr; \
905 _argvec[1] = (unsigned long)(arg1); \
906 _argvec[2] = (unsigned long)(arg2); \
907 _argvec[3] = (unsigned long)(arg3); \
908 _argvec[4] = (unsigned long)(arg4); \
909 _argvec[5] = (unsigned long)(arg5); \
910 _argvec[6] = (unsigned long)(arg6); \
911 __asm__ volatile( \
912 "subl $8, %%esp\n\t" \
913 "pushl 24(%%eax)\n\t" \
914 "pushl 20(%%eax)\n\t" \
915 "pushl 16(%%eax)\n\t" \
916 "pushl 12(%%eax)\n\t" \
917 "pushl 8(%%eax)\n\t" \
918 "pushl 4(%%eax)\n\t" \
919 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
920 VALGRIND_CALL_NOREDIR_EAX \
921 "addl $32, %%esp\n" \
922 : /*out*/ "=a" (_res) \
923 : /*in*/ "a" (&_argvec[0]) \
924 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
925 ); \
926 lval = (__typeof__(lval)) _res; \
927 } while (0)
928
929 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
930 arg7) \
931 do { \
932 volatile OrigFn _orig = (orig); \
933 volatile unsigned long _argvec[8]; \
934 volatile unsigned long _res; \
935 _argvec[0] = (unsigned long)_orig.nraddr; \
936 _argvec[1] = (unsigned long)(arg1); \
937 _argvec[2] = (unsigned long)(arg2); \
938 _argvec[3] = (unsigned long)(arg3); \
939 _argvec[4] = (unsigned long)(arg4); \
940 _argvec[5] = (unsigned long)(arg5); \
941 _argvec[6] = (unsigned long)(arg6); \
942 _argvec[7] = (unsigned long)(arg7); \
943 __asm__ volatile( \
944 "subl $4, %%esp\n\t" \
945 "pushl 28(%%eax)\n\t" \
946 "pushl 24(%%eax)\n\t" \
947 "pushl 20(%%eax)\n\t" \
948 "pushl 16(%%eax)\n\t" \
949 "pushl 12(%%eax)\n\t" \
950 "pushl 8(%%eax)\n\t" \
951 "pushl 4(%%eax)\n\t" \
952 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
953 VALGRIND_CALL_NOREDIR_EAX \
954 "addl $32, %%esp\n" \
955 : /*out*/ "=a" (_res) \
956 : /*in*/ "a" (&_argvec[0]) \
957 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
958 ); \
959 lval = (__typeof__(lval)) _res; \
960 } while (0)
961
962 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
963 arg7,arg8) \
964 do { \
965 volatile OrigFn _orig = (orig); \
966 volatile unsigned long _argvec[9]; \
967 volatile unsigned long _res; \
968 _argvec[0] = (unsigned long)_orig.nraddr; \
969 _argvec[1] = (unsigned long)(arg1); \
970 _argvec[2] = (unsigned long)(arg2); \
971 _argvec[3] = (unsigned long)(arg3); \
972 _argvec[4] = (unsigned long)(arg4); \
973 _argvec[5] = (unsigned long)(arg5); \
974 _argvec[6] = (unsigned long)(arg6); \
975 _argvec[7] = (unsigned long)(arg7); \
976 _argvec[8] = (unsigned long)(arg8); \
977 __asm__ volatile( \
978 "pushl 32(%%eax)\n\t" \
979 "pushl 28(%%eax)\n\t" \
980 "pushl 24(%%eax)\n\t" \
981 "pushl 20(%%eax)\n\t" \
982 "pushl 16(%%eax)\n\t" \
983 "pushl 12(%%eax)\n\t" \
984 "pushl 8(%%eax)\n\t" \
985 "pushl 4(%%eax)\n\t" \
986 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
987 VALGRIND_CALL_NOREDIR_EAX \
988 "addl $32, %%esp\n" \
989 : /*out*/ "=a" (_res) \
990 : /*in*/ "a" (&_argvec[0]) \
991 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
992 ); \
993 lval = (__typeof__(lval)) _res; \
994 } while (0)
995
996 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
997 arg7,arg8,arg9) \
998 do { \
999 volatile OrigFn _orig = (orig); \
1000 volatile unsigned long _argvec[10]; \
1001 volatile unsigned long _res; \
1002 _argvec[0] = (unsigned long)_orig.nraddr; \
1003 _argvec[1] = (unsigned long)(arg1); \
1004 _argvec[2] = (unsigned long)(arg2); \
1005 _argvec[3] = (unsigned long)(arg3); \
1006 _argvec[4] = (unsigned long)(arg4); \
1007 _argvec[5] = (unsigned long)(arg5); \
1008 _argvec[6] = (unsigned long)(arg6); \
1009 _argvec[7] = (unsigned long)(arg7); \
1010 _argvec[8] = (unsigned long)(arg8); \
1011 _argvec[9] = (unsigned long)(arg9); \
1012 __asm__ volatile( \
1013 "subl $12, %%esp\n\t" \
1014 "pushl 36(%%eax)\n\t" \
1015 "pushl 32(%%eax)\n\t" \
1016 "pushl 28(%%eax)\n\t" \
1017 "pushl 24(%%eax)\n\t" \
1018 "pushl 20(%%eax)\n\t" \
1019 "pushl 16(%%eax)\n\t" \
1020 "pushl 12(%%eax)\n\t" \
1021 "pushl 8(%%eax)\n\t" \
1022 "pushl 4(%%eax)\n\t" \
1023 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1024 VALGRIND_CALL_NOREDIR_EAX \
1025 "addl $48, %%esp\n" \
1026 : /*out*/ "=a" (_res) \
1027 : /*in*/ "a" (&_argvec[0]) \
1028 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1029 ); \
1030 lval = (__typeof__(lval)) _res; \
1031 } while (0)
1032
1033 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1034 arg7,arg8,arg9,arg10) \
1035 do { \
1036 volatile OrigFn _orig = (orig); \
1037 volatile unsigned long _argvec[11]; \
1038 volatile unsigned long _res; \
1039 _argvec[0] = (unsigned long)_orig.nraddr; \
1040 _argvec[1] = (unsigned long)(arg1); \
1041 _argvec[2] = (unsigned long)(arg2); \
1042 _argvec[3] = (unsigned long)(arg3); \
1043 _argvec[4] = (unsigned long)(arg4); \
1044 _argvec[5] = (unsigned long)(arg5); \
1045 _argvec[6] = (unsigned long)(arg6); \
1046 _argvec[7] = (unsigned long)(arg7); \
1047 _argvec[8] = (unsigned long)(arg8); \
1048 _argvec[9] = (unsigned long)(arg9); \
1049 _argvec[10] = (unsigned long)(arg10); \
1050 __asm__ volatile( \
1051 "subl $8, %%esp\n\t" \
1052 "pushl 40(%%eax)\n\t" \
1053 "pushl 36(%%eax)\n\t" \
1054 "pushl 32(%%eax)\n\t" \
1055 "pushl 28(%%eax)\n\t" \
1056 "pushl 24(%%eax)\n\t" \
1057 "pushl 20(%%eax)\n\t" \
1058 "pushl 16(%%eax)\n\t" \
1059 "pushl 12(%%eax)\n\t" \
1060 "pushl 8(%%eax)\n\t" \
1061 "pushl 4(%%eax)\n\t" \
1062 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1063 VALGRIND_CALL_NOREDIR_EAX \
1064 "addl $48, %%esp\n" \
1065 : /*out*/ "=a" (_res) \
1066 : /*in*/ "a" (&_argvec[0]) \
1067 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1068 ); \
1069 lval = (__typeof__(lval)) _res; \
1070 } while (0)
1071
1072 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1073 arg6,arg7,arg8,arg9,arg10, \
1074 arg11) \
1075 do { \
1076 volatile OrigFn _orig = (orig); \
1077 volatile unsigned long _argvec[12]; \
1078 volatile unsigned long _res; \
1079 _argvec[0] = (unsigned long)_orig.nraddr; \
1080 _argvec[1] = (unsigned long)(arg1); \
1081 _argvec[2] = (unsigned long)(arg2); \
1082 _argvec[3] = (unsigned long)(arg3); \
1083 _argvec[4] = (unsigned long)(arg4); \
1084 _argvec[5] = (unsigned long)(arg5); \
1085 _argvec[6] = (unsigned long)(arg6); \
1086 _argvec[7] = (unsigned long)(arg7); \
1087 _argvec[8] = (unsigned long)(arg8); \
1088 _argvec[9] = (unsigned long)(arg9); \
1089 _argvec[10] = (unsigned long)(arg10); \
1090 _argvec[11] = (unsigned long)(arg11); \
1091 __asm__ volatile( \
1092 "subl $4, %%esp\n\t" \
1093 "pushl 44(%%eax)\n\t" \
1094 "pushl 40(%%eax)\n\t" \
1095 "pushl 36(%%eax)\n\t" \
1096 "pushl 32(%%eax)\n\t" \
1097 "pushl 28(%%eax)\n\t" \
1098 "pushl 24(%%eax)\n\t" \
1099 "pushl 20(%%eax)\n\t" \
1100 "pushl 16(%%eax)\n\t" \
1101 "pushl 12(%%eax)\n\t" \
1102 "pushl 8(%%eax)\n\t" \
1103 "pushl 4(%%eax)\n\t" \
1104 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1105 VALGRIND_CALL_NOREDIR_EAX \
1106 "addl $48, %%esp\n" \
1107 : /*out*/ "=a" (_res) \
1108 : /*in*/ "a" (&_argvec[0]) \
1109 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1110 ); \
1111 lval = (__typeof__(lval)) _res; \
1112 } while (0)
1113
1114 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1115 arg6,arg7,arg8,arg9,arg10, \
1116 arg11,arg12) \
1117 do { \
1118 volatile OrigFn _orig = (orig); \
1119 volatile unsigned long _argvec[13]; \
1120 volatile unsigned long _res; \
1121 _argvec[0] = (unsigned long)_orig.nraddr; \
1122 _argvec[1] = (unsigned long)(arg1); \
1123 _argvec[2] = (unsigned long)(arg2); \
1124 _argvec[3] = (unsigned long)(arg3); \
1125 _argvec[4] = (unsigned long)(arg4); \
1126 _argvec[5] = (unsigned long)(arg5); \
1127 _argvec[6] = (unsigned long)(arg6); \
1128 _argvec[7] = (unsigned long)(arg7); \
1129 _argvec[8] = (unsigned long)(arg8); \
1130 _argvec[9] = (unsigned long)(arg9); \
1131 _argvec[10] = (unsigned long)(arg10); \
1132 _argvec[11] = (unsigned long)(arg11); \
1133 _argvec[12] = (unsigned long)(arg12); \
1134 __asm__ volatile( \
1135 "pushl 48(%%eax)\n\t" \
1136 "pushl 44(%%eax)\n\t" \
1137 "pushl 40(%%eax)\n\t" \
1138 "pushl 36(%%eax)\n\t" \
1139 "pushl 32(%%eax)\n\t" \
1140 "pushl 28(%%eax)\n\t" \
1141 "pushl 24(%%eax)\n\t" \
1142 "pushl 20(%%eax)\n\t" \
1143 "pushl 16(%%eax)\n\t" \
1144 "pushl 12(%%eax)\n\t" \
1145 "pushl 8(%%eax)\n\t" \
1146 "pushl 4(%%eax)\n\t" \
1147 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1148 VALGRIND_CALL_NOREDIR_EAX \
1149 "addl $48, %%esp\n" \
1150 : /*out*/ "=a" (_res) \
1151 : /*in*/ "a" (&_argvec[0]) \
1152 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1153 ); \
1154 lval = (__typeof__(lval)) _res; \
1155 } while (0)
1156
1157 #endif /* PLAT_x86_linux || PLAT_x86_darwin */
1158
1159 /* ------------------------ amd64-{linux,darwin} --------------- */
1160
1161 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
1162
1163 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1164
1165 /* These regs are trashed by the hidden call. */
1166 #define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
1167 "rdi", "r8", "r9", "r10", "r11"
1168
1169 /* This is all pretty complex. It's so as to make stack unwinding
1170 work reliably. See bug 243270. The basic problem is the sub and
1171 add of 128 to %rsp in all of the following macros. If gcc believes
1172 the CFA is in %rsp, then unwinding may fail, because what's at the
1173 CFA is not what gcc "expected" when it constructs the CFIs for the
1174 places where the macros are instantiated.
1175
1176 But we can't just add a CFI annotation to increase the CFA offset
1177 by 128, to match the sub of 128 from %rsp, because we don't know
1178 whether gcc has chosen %rsp as the CFA at that point, or whether it
1179 has chosen some other register (eg, %rbp). In the latter case,
1180 adding a CFI annotation to change the CFA offset is simply wrong.
1181
1182 So the solution is to get hold of the CFA using
1183 __builtin_dwarf_cfa(), put it in a known register, and add a
1184 CFI annotation to say what the register is. We choose %rbp for
1185 this (perhaps perversely), because:
1186
1187 (1) %rbp is already subject to unwinding. If a new register was
1188 chosen then the unwinder would have to unwind it in all stack
1189 traces, which is expensive, and
1190
1191 (2) %rbp is already subject to precise exception updates in the
1192 JIT. If a new register was chosen, we'd have to have precise
1193 exceptions for it too, which reduces performance of the
1194 generated code.
1195
1196 However .. one extra complication. We can't just whack the result
1197 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1198 list of trashed registers at the end of the inline assembly
1199 fragments; gcc won't allow %rbp to appear in that list. Hence
1200 instead we need to stash %rbp in %r15 for the duration of the asm,
1201 and say that %r15 is trashed instead. gcc seems happy to go with
1202 that.
1203
1204 Oh .. and this all needs to be conditionalised so that it is
1205 unchanged from before this commit, when compiled with older gccs
1206 that don't support __builtin_dwarf_cfa. Furthermore, since
1207 this header file is freestanding, it has to be independent of
1208 config.h, and so the following conditionalisation cannot depend on
1209 configure time checks.
1210
1211 Although it's not clear from
1212 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1213 this expression excludes Darwin.
1214 .cfi directives in Darwin assembly appear to be completely
1215 different and I haven't investigated how they work.
1216
1217 For even more entertainment value, note we have to use the
1218 completely undocumented __builtin_dwarf_cfa(), which appears to
1219 really compute the CFA, whereas __builtin_frame_address(0) claims
1220 to but actually doesn't. See
1221 https://bugs.kde.org/show_bug.cgi?id=243270#c47
1222 */
1223 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
1224 # define __FRAME_POINTER \
1225 ,"r"(__builtin_dwarf_cfa())
1226 # define VALGRIND_CFI_PROLOGUE \
1227 "movq %%rbp, %%r15\n\t" \
1228 "movq %2, %%rbp\n\t" \
1229 ".cfi_remember_state\n\t" \
1230 ".cfi_def_cfa rbp, 0\n\t"
1231 # define VALGRIND_CFI_EPILOGUE \
1232 "movq %%r15, %%rbp\n\t" \
1233 ".cfi_restore_state\n\t"
1234 #else
1235 # define __FRAME_POINTER
1236 # define VALGRIND_CFI_PROLOGUE
1237 # define VALGRIND_CFI_EPILOGUE
1238 #endif
1239
1240
1241 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1242 long) == 8. */
1243
1244 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1245 macros. In order not to trash the stack redzone, we need to drop
1246 %rsp by 128 before the hidden call, and restore afterwards. The
1247 nastiness is that it is only by luck that the stack still appears
1248 to be unwindable during the hidden call - since then the behaviour
1249 of any routine using this macro does not match what the CFI data
1250 says. Sigh.
1251
1252 Why is this important? Imagine that a wrapper has a stack-
1253 allocated local and passes a pointer to it to the hidden call.
1254 Because gcc does not know about the hidden call, it may allocate
1255 that local in the redzone. Unfortunately the hidden call may then
1256 trash it before it comes to use it. So we must step clear of the
1257 redzone, for the duration of the hidden call, to make it safe.
1258
1259 Probably the same problem afflicts the other redzone-style ABIs too
1260 (ppc64-linux); but for those, the stack is
1261 self-describing (none of this CFI nonsense) so at least messing
1262 with the stack pointer doesn't risk producing a non-unwindable
1263 stack. */
1264
1265 #define CALL_FN_W_v(lval, orig) \
1266 do { \
1267 volatile OrigFn _orig = (orig); \
1268 volatile unsigned long _argvec[1]; \
1269 volatile unsigned long _res; \
1270 _argvec[0] = (unsigned long)_orig.nraddr; \
1271 __asm__ volatile( \
1272 VALGRIND_CFI_PROLOGUE \
1273 "subq $128,%%rsp\n\t" \
1274 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1275 VALGRIND_CALL_NOREDIR_RAX \
1276 "addq $128,%%rsp\n\t" \
1277 VALGRIND_CFI_EPILOGUE \
1278 : /*out*/ "=a" (_res) \
1279 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1280 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1281 ); \
1282 lval = (__typeof__(lval)) _res; \
1283 } while (0)
1284
1285 #define CALL_FN_W_W(lval, orig, arg1) \
1286 do { \
1287 volatile OrigFn _orig = (orig); \
1288 volatile unsigned long _argvec[2]; \
1289 volatile unsigned long _res; \
1290 _argvec[0] = (unsigned long)_orig.nraddr; \
1291 _argvec[1] = (unsigned long)(arg1); \
1292 __asm__ volatile( \
1293 VALGRIND_CFI_PROLOGUE \
1294 "subq $128,%%rsp\n\t" \
1295 "movq 8(%%rax), %%rdi\n\t" \
1296 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1297 VALGRIND_CALL_NOREDIR_RAX \
1298 "addq $128,%%rsp\n\t" \
1299 VALGRIND_CFI_EPILOGUE \
1300 : /*out*/ "=a" (_res) \
1301 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1302 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1303 ); \
1304 lval = (__typeof__(lval)) _res; \
1305 } while (0)
1306
1307 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1308 do { \
1309 volatile OrigFn _orig = (orig); \
1310 volatile unsigned long _argvec[3]; \
1311 volatile unsigned long _res; \
1312 _argvec[0] = (unsigned long)_orig.nraddr; \
1313 _argvec[1] = (unsigned long)(arg1); \
1314 _argvec[2] = (unsigned long)(arg2); \
1315 __asm__ volatile( \
1316 VALGRIND_CFI_PROLOGUE \
1317 "subq $128,%%rsp\n\t" \
1318 "movq 16(%%rax), %%rsi\n\t" \
1319 "movq 8(%%rax), %%rdi\n\t" \
1320 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1321 VALGRIND_CALL_NOREDIR_RAX \
1322 "addq $128,%%rsp\n\t" \
1323 VALGRIND_CFI_EPILOGUE \
1324 : /*out*/ "=a" (_res) \
1325 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1326 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1327 ); \
1328 lval = (__typeof__(lval)) _res; \
1329 } while (0)
1330
1331 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1332 do { \
1333 volatile OrigFn _orig = (orig); \
1334 volatile unsigned long _argvec[4]; \
1335 volatile unsigned long _res; \
1336 _argvec[0] = (unsigned long)_orig.nraddr; \
1337 _argvec[1] = (unsigned long)(arg1); \
1338 _argvec[2] = (unsigned long)(arg2); \
1339 _argvec[3] = (unsigned long)(arg3); \
1340 __asm__ volatile( \
1341 VALGRIND_CFI_PROLOGUE \
1342 "subq $128,%%rsp\n\t" \
1343 "movq 24(%%rax), %%rdx\n\t" \
1344 "movq 16(%%rax), %%rsi\n\t" \
1345 "movq 8(%%rax), %%rdi\n\t" \
1346 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1347 VALGRIND_CALL_NOREDIR_RAX \
1348 "addq $128,%%rsp\n\t" \
1349 VALGRIND_CFI_EPILOGUE \
1350 : /*out*/ "=a" (_res) \
1351 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1352 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1353 ); \
1354 lval = (__typeof__(lval)) _res; \
1355 } while (0)
1356
1357 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1358 do { \
1359 volatile OrigFn _orig = (orig); \
1360 volatile unsigned long _argvec[5]; \
1361 volatile unsigned long _res; \
1362 _argvec[0] = (unsigned long)_orig.nraddr; \
1363 _argvec[1] = (unsigned long)(arg1); \
1364 _argvec[2] = (unsigned long)(arg2); \
1365 _argvec[3] = (unsigned long)(arg3); \
1366 _argvec[4] = (unsigned long)(arg4); \
1367 __asm__ volatile( \
1368 VALGRIND_CFI_PROLOGUE \
1369 "subq $128,%%rsp\n\t" \
1370 "movq 32(%%rax), %%rcx\n\t" \
1371 "movq 24(%%rax), %%rdx\n\t" \
1372 "movq 16(%%rax), %%rsi\n\t" \
1373 "movq 8(%%rax), %%rdi\n\t" \
1374 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1375 VALGRIND_CALL_NOREDIR_RAX \
1376 "addq $128,%%rsp\n\t" \
1377 VALGRIND_CFI_EPILOGUE \
1378 : /*out*/ "=a" (_res) \
1379 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1380 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1381 ); \
1382 lval = (__typeof__(lval)) _res; \
1383 } while (0)
1384
1385 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1386 do { \
1387 volatile OrigFn _orig = (orig); \
1388 volatile unsigned long _argvec[6]; \
1389 volatile unsigned long _res; \
1390 _argvec[0] = (unsigned long)_orig.nraddr; \
1391 _argvec[1] = (unsigned long)(arg1); \
1392 _argvec[2] = (unsigned long)(arg2); \
1393 _argvec[3] = (unsigned long)(arg3); \
1394 _argvec[4] = (unsigned long)(arg4); \
1395 _argvec[5] = (unsigned long)(arg5); \
1396 __asm__ volatile( \
1397 VALGRIND_CFI_PROLOGUE \
1398 "subq $128,%%rsp\n\t" \
1399 "movq 40(%%rax), %%r8\n\t" \
1400 "movq 32(%%rax), %%rcx\n\t" \
1401 "movq 24(%%rax), %%rdx\n\t" \
1402 "movq 16(%%rax), %%rsi\n\t" \
1403 "movq 8(%%rax), %%rdi\n\t" \
1404 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1405 VALGRIND_CALL_NOREDIR_RAX \
1406 "addq $128,%%rsp\n\t" \
1407 VALGRIND_CFI_EPILOGUE \
1408 : /*out*/ "=a" (_res) \
1409 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1410 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1411 ); \
1412 lval = (__typeof__(lval)) _res; \
1413 } while (0)
1414
1415 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1416 do { \
1417 volatile OrigFn _orig = (orig); \
1418 volatile unsigned long _argvec[7]; \
1419 volatile unsigned long _res; \
1420 _argvec[0] = (unsigned long)_orig.nraddr; \
1421 _argvec[1] = (unsigned long)(arg1); \
1422 _argvec[2] = (unsigned long)(arg2); \
1423 _argvec[3] = (unsigned long)(arg3); \
1424 _argvec[4] = (unsigned long)(arg4); \
1425 _argvec[5] = (unsigned long)(arg5); \
1426 _argvec[6] = (unsigned long)(arg6); \
1427 __asm__ volatile( \
1428 VALGRIND_CFI_PROLOGUE \
1429 "subq $128,%%rsp\n\t" \
1430 "movq 48(%%rax), %%r9\n\t" \
1431 "movq 40(%%rax), %%r8\n\t" \
1432 "movq 32(%%rax), %%rcx\n\t" \
1433 "movq 24(%%rax), %%rdx\n\t" \
1434 "movq 16(%%rax), %%rsi\n\t" \
1435 "movq 8(%%rax), %%rdi\n\t" \
1436 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1437 VALGRIND_CALL_NOREDIR_RAX \
1438 "addq $128,%%rsp\n\t" \
1439 VALGRIND_CFI_EPILOGUE \
1440 : /*out*/ "=a" (_res) \
1441 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1442 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1443 ); \
1444 lval = (__typeof__(lval)) _res; \
1445 } while (0)
1446
1447 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1448 arg7) \
1449 do { \
1450 volatile OrigFn _orig = (orig); \
1451 volatile unsigned long _argvec[8]; \
1452 volatile unsigned long _res; \
1453 _argvec[0] = (unsigned long)_orig.nraddr; \
1454 _argvec[1] = (unsigned long)(arg1); \
1455 _argvec[2] = (unsigned long)(arg2); \
1456 _argvec[3] = (unsigned long)(arg3); \
1457 _argvec[4] = (unsigned long)(arg4); \
1458 _argvec[5] = (unsigned long)(arg5); \
1459 _argvec[6] = (unsigned long)(arg6); \
1460 _argvec[7] = (unsigned long)(arg7); \
1461 __asm__ volatile( \
1462 VALGRIND_CFI_PROLOGUE \
1463 "subq $136,%%rsp\n\t" \
1464 "pushq 56(%%rax)\n\t" \
1465 "movq 48(%%rax), %%r9\n\t" \
1466 "movq 40(%%rax), %%r8\n\t" \
1467 "movq 32(%%rax), %%rcx\n\t" \
1468 "movq 24(%%rax), %%rdx\n\t" \
1469 "movq 16(%%rax), %%rsi\n\t" \
1470 "movq 8(%%rax), %%rdi\n\t" \
1471 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1472 VALGRIND_CALL_NOREDIR_RAX \
1473 "addq $8, %%rsp\n" \
1474 "addq $136,%%rsp\n\t" \
1475 VALGRIND_CFI_EPILOGUE \
1476 : /*out*/ "=a" (_res) \
1477 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1478 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1479 ); \
1480 lval = (__typeof__(lval)) _res; \
1481 } while (0)
1482
1483 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1484 arg7,arg8) \
1485 do { \
1486 volatile OrigFn _orig = (orig); \
1487 volatile unsigned long _argvec[9]; \
1488 volatile unsigned long _res; \
1489 _argvec[0] = (unsigned long)_orig.nraddr; \
1490 _argvec[1] = (unsigned long)(arg1); \
1491 _argvec[2] = (unsigned long)(arg2); \
1492 _argvec[3] = (unsigned long)(arg3); \
1493 _argvec[4] = (unsigned long)(arg4); \
1494 _argvec[5] = (unsigned long)(arg5); \
1495 _argvec[6] = (unsigned long)(arg6); \
1496 _argvec[7] = (unsigned long)(arg7); \
1497 _argvec[8] = (unsigned long)(arg8); \
1498 __asm__ volatile( \
1499 VALGRIND_CFI_PROLOGUE \
1500 "subq $128,%%rsp\n\t" \
1501 "pushq 64(%%rax)\n\t" \
1502 "pushq 56(%%rax)\n\t" \
1503 "movq 48(%%rax), %%r9\n\t" \
1504 "movq 40(%%rax), %%r8\n\t" \
1505 "movq 32(%%rax), %%rcx\n\t" \
1506 "movq 24(%%rax), %%rdx\n\t" \
1507 "movq 16(%%rax), %%rsi\n\t" \
1508 "movq 8(%%rax), %%rdi\n\t" \
1509 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1510 VALGRIND_CALL_NOREDIR_RAX \
1511 "addq $16, %%rsp\n" \
1512 "addq $128,%%rsp\n\t" \
1513 VALGRIND_CFI_EPILOGUE \
1514 : /*out*/ "=a" (_res) \
1515 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1516 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1517 ); \
1518 lval = (__typeof__(lval)) _res; \
1519 } while (0)
1520
1521 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1522 arg7,arg8,arg9) \
1523 do { \
1524 volatile OrigFn _orig = (orig); \
1525 volatile unsigned long _argvec[10]; \
1526 volatile unsigned long _res; \
1527 _argvec[0] = (unsigned long)_orig.nraddr; \
1528 _argvec[1] = (unsigned long)(arg1); \
1529 _argvec[2] = (unsigned long)(arg2); \
1530 _argvec[3] = (unsigned long)(arg3); \
1531 _argvec[4] = (unsigned long)(arg4); \
1532 _argvec[5] = (unsigned long)(arg5); \
1533 _argvec[6] = (unsigned long)(arg6); \
1534 _argvec[7] = (unsigned long)(arg7); \
1535 _argvec[8] = (unsigned long)(arg8); \
1536 _argvec[9] = (unsigned long)(arg9); \
1537 __asm__ volatile( \
1538 VALGRIND_CFI_PROLOGUE \
1539 "subq $136,%%rsp\n\t" \
1540 "pushq 72(%%rax)\n\t" \
1541 "pushq 64(%%rax)\n\t" \
1542 "pushq 56(%%rax)\n\t" \
1543 "movq 48(%%rax), %%r9\n\t" \
1544 "movq 40(%%rax), %%r8\n\t" \
1545 "movq 32(%%rax), %%rcx\n\t" \
1546 "movq 24(%%rax), %%rdx\n\t" \
1547 "movq 16(%%rax), %%rsi\n\t" \
1548 "movq 8(%%rax), %%rdi\n\t" \
1549 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1550 VALGRIND_CALL_NOREDIR_RAX \
1551 "addq $24, %%rsp\n" \
1552 "addq $136,%%rsp\n\t" \
1553 VALGRIND_CFI_EPILOGUE \
1554 : /*out*/ "=a" (_res) \
1555 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1556 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1557 ); \
1558 lval = (__typeof__(lval)) _res; \
1559 } while (0)
1560
1561 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1562 arg7,arg8,arg9,arg10) \
1563 do { \
1564 volatile OrigFn _orig = (orig); \
1565 volatile unsigned long _argvec[11]; \
1566 volatile unsigned long _res; \
1567 _argvec[0] = (unsigned long)_orig.nraddr; \
1568 _argvec[1] = (unsigned long)(arg1); \
1569 _argvec[2] = (unsigned long)(arg2); \
1570 _argvec[3] = (unsigned long)(arg3); \
1571 _argvec[4] = (unsigned long)(arg4); \
1572 _argvec[5] = (unsigned long)(arg5); \
1573 _argvec[6] = (unsigned long)(arg6); \
1574 _argvec[7] = (unsigned long)(arg7); \
1575 _argvec[8] = (unsigned long)(arg8); \
1576 _argvec[9] = (unsigned long)(arg9); \
1577 _argvec[10] = (unsigned long)(arg10); \
1578 __asm__ volatile( \
1579 VALGRIND_CFI_PROLOGUE \
1580 "subq $128,%%rsp\n\t" \
1581 "pushq 80(%%rax)\n\t" \
1582 "pushq 72(%%rax)\n\t" \
1583 "pushq 64(%%rax)\n\t" \
1584 "pushq 56(%%rax)\n\t" \
1585 "movq 48(%%rax), %%r9\n\t" \
1586 "movq 40(%%rax), %%r8\n\t" \
1587 "movq 32(%%rax), %%rcx\n\t" \
1588 "movq 24(%%rax), %%rdx\n\t" \
1589 "movq 16(%%rax), %%rsi\n\t" \
1590 "movq 8(%%rax), %%rdi\n\t" \
1591 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1592 VALGRIND_CALL_NOREDIR_RAX \
1593 "addq $32, %%rsp\n" \
1594 "addq $128,%%rsp\n\t" \
1595 VALGRIND_CFI_EPILOGUE \
1596 : /*out*/ "=a" (_res) \
1597 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1598 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1599 ); \
1600 lval = (__typeof__(lval)) _res; \
1601 } while (0)
1602
1603 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1604 arg7,arg8,arg9,arg10,arg11) \
1605 do { \
1606 volatile OrigFn _orig = (orig); \
1607 volatile unsigned long _argvec[12]; \
1608 volatile unsigned long _res; \
1609 _argvec[0] = (unsigned long)_orig.nraddr; \
1610 _argvec[1] = (unsigned long)(arg1); \
1611 _argvec[2] = (unsigned long)(arg2); \
1612 _argvec[3] = (unsigned long)(arg3); \
1613 _argvec[4] = (unsigned long)(arg4); \
1614 _argvec[5] = (unsigned long)(arg5); \
1615 _argvec[6] = (unsigned long)(arg6); \
1616 _argvec[7] = (unsigned long)(arg7); \
1617 _argvec[8] = (unsigned long)(arg8); \
1618 _argvec[9] = (unsigned long)(arg9); \
1619 _argvec[10] = (unsigned long)(arg10); \
1620 _argvec[11] = (unsigned long)(arg11); \
1621 __asm__ volatile( \
1622 VALGRIND_CFI_PROLOGUE \
1623 "subq $136,%%rsp\n\t" \
1624 "pushq 88(%%rax)\n\t" \
1625 "pushq 80(%%rax)\n\t" \
1626 "pushq 72(%%rax)\n\t" \
1627 "pushq 64(%%rax)\n\t" \
1628 "pushq 56(%%rax)\n\t" \
1629 "movq 48(%%rax), %%r9\n\t" \
1630 "movq 40(%%rax), %%r8\n\t" \
1631 "movq 32(%%rax), %%rcx\n\t" \
1632 "movq 24(%%rax), %%rdx\n\t" \
1633 "movq 16(%%rax), %%rsi\n\t" \
1634 "movq 8(%%rax), %%rdi\n\t" \
1635 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1636 VALGRIND_CALL_NOREDIR_RAX \
1637 "addq $40, %%rsp\n" \
1638 "addq $136,%%rsp\n\t" \
1639 VALGRIND_CFI_EPILOGUE \
1640 : /*out*/ "=a" (_res) \
1641 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1642 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1643 ); \
1644 lval = (__typeof__(lval)) _res; \
1645 } while (0)
1646
1647 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1648 arg7,arg8,arg9,arg10,arg11,arg12) \
1649 do { \
1650 volatile OrigFn _orig = (orig); \
1651 volatile unsigned long _argvec[13]; \
1652 volatile unsigned long _res; \
1653 _argvec[0] = (unsigned long)_orig.nraddr; \
1654 _argvec[1] = (unsigned long)(arg1); \
1655 _argvec[2] = (unsigned long)(arg2); \
1656 _argvec[3] = (unsigned long)(arg3); \
1657 _argvec[4] = (unsigned long)(arg4); \
1658 _argvec[5] = (unsigned long)(arg5); \
1659 _argvec[6] = (unsigned long)(arg6); \
1660 _argvec[7] = (unsigned long)(arg7); \
1661 _argvec[8] = (unsigned long)(arg8); \
1662 _argvec[9] = (unsigned long)(arg9); \
1663 _argvec[10] = (unsigned long)(arg10); \
1664 _argvec[11] = (unsigned long)(arg11); \
1665 _argvec[12] = (unsigned long)(arg12); \
1666 __asm__ volatile( \
1667 VALGRIND_CFI_PROLOGUE \
1668 "subq $128,%%rsp\n\t" \
1669 "pushq 96(%%rax)\n\t" \
1670 "pushq 88(%%rax)\n\t" \
1671 "pushq 80(%%rax)\n\t" \
1672 "pushq 72(%%rax)\n\t" \
1673 "pushq 64(%%rax)\n\t" \
1674 "pushq 56(%%rax)\n\t" \
1675 "movq 48(%%rax), %%r9\n\t" \
1676 "movq 40(%%rax), %%r8\n\t" \
1677 "movq 32(%%rax), %%rcx\n\t" \
1678 "movq 24(%%rax), %%rdx\n\t" \
1679 "movq 16(%%rax), %%rsi\n\t" \
1680 "movq 8(%%rax), %%rdi\n\t" \
1681 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1682 VALGRIND_CALL_NOREDIR_RAX \
1683 "addq $48, %%rsp\n" \
1684 "addq $128,%%rsp\n\t" \
1685 VALGRIND_CFI_EPILOGUE \
1686 : /*out*/ "=a" (_res) \
1687 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1688 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r15" \
1689 ); \
1690 lval = (__typeof__(lval)) _res; \
1691 } while (0)
1692
1693 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
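
/* Editor's note, for illustration only (not part of the original
   header): on every platform the CALL_FN_W_* macros are intended to
   be used from a function wrapper built with the function-wrapping
   macros defined earlier in this file, roughly along these lines --
   the wrapped function "foo" is hypothetical, and the NONE soname
   and the Z-encoded naming scheme are described alongside those
   macros:

      int I_WRAP_SONAME_FNNAME_ZU(NONE, foo) ( int x, int y )
      {
         int    result;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);        // grab the original foo
         CALL_FN_W_WW(result, fn, x, y);  // call it, without redirection
         return result;
      }
*/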
1694
1695 /* ------------------------ ppc32-linux ------------------------ */
1696
1697 #if defined(PLAT_ppc32_linux)
1698
1699 /* This is useful for finding out how arguments that don't fit in registers are passed on the stack:
1700
1701 extern int f9 ( int,int,int,int,int,int,int,int,int );
1702 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
1703 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
1704 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
1705
1706 int g9 ( void ) {
1707 return f9(11,22,33,44,55,66,77,88,99);
1708 }
1709 int g10 ( void ) {
1710 return f10(11,22,33,44,55,66,77,88,99,110);
1711 }
1712 int g11 ( void ) {
1713 return f11(11,22,33,44,55,66,77,88,99,110,121);
1714 }
1715 int g12 ( void ) {
1716 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
1717 }
1718 */
1719
1720 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
1721
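/* Editor's note: at the point of the call, ppc32 SVR4 ABI arguments
   that do not fit in r3-r10 are expected at 8(r1), 12(r1), ...,
   just past the back-chain and LR-save words, which is what
   CALL_FN_W_9W .. CALL_FN_W_12W below set up after dropping r1 by
   16 or 32 bytes -- amounts that also keep r1 16-byte aligned. */
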
1722 /* These regs are trashed by the hidden call. */
1723 #define __CALLER_SAVED_REGS \
1724 "lr", "ctr", "xer", \
1725 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
1726 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
1727 "r11", "r12", "r13"
1728
1729 /* These CALL_FN_ macros assume that on ppc32-linux,
1730 sizeof(unsigned long) == 4. */
1731
1732 #define CALL_FN_W_v(lval, orig) \
1733 do { \
1734 volatile OrigFn _orig = (orig); \
1735 volatile unsigned long _argvec[1]; \
1736 volatile unsigned long _res; \
1737 _argvec[0] = (unsigned long)_orig.nraddr; \
1738 __asm__ volatile( \
1739 "mr 11,%1\n\t" \
1740 "lwz 11,0(11)\n\t" /* target->r11 */ \
1741 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1742 "mr %0,3" \
1743 : /*out*/ "=r" (_res) \
1744 : /*in*/ "r" (&_argvec[0]) \
1745 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1746 ); \
1747 lval = (__typeof__(lval)) _res; \
1748 } while (0)
1749
1750 #define CALL_FN_W_W(lval, orig, arg1) \
1751 do { \
1752 volatile OrigFn _orig = (orig); \
1753 volatile unsigned long _argvec[2]; \
1754 volatile unsigned long _res; \
1755 _argvec[0] = (unsigned long)_orig.nraddr; \
1756 _argvec[1] = (unsigned long)arg1; \
1757 __asm__ volatile( \
1758 "mr 11,%1\n\t" \
1759 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1760 "lwz 11,0(11)\n\t" /* target->r11 */ \
1761 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1762 "mr %0,3" \
1763 : /*out*/ "=r" (_res) \
1764 : /*in*/ "r" (&_argvec[0]) \
1765 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1766 ); \
1767 lval = (__typeof__(lval)) _res; \
1768 } while (0)
1769
1770 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1771 do { \
1772 volatile OrigFn _orig = (orig); \
1773 volatile unsigned long _argvec[3]; \
1774 volatile unsigned long _res; \
1775 _argvec[0] = (unsigned long)_orig.nraddr; \
1776 _argvec[1] = (unsigned long)arg1; \
1777 _argvec[2] = (unsigned long)arg2; \
1778 __asm__ volatile( \
1779 "mr 11,%1\n\t" \
1780 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1781 "lwz 4,8(11)\n\t" \
1782 "lwz 11,0(11)\n\t" /* target->r11 */ \
1783 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1784 "mr %0,3" \
1785 : /*out*/ "=r" (_res) \
1786 : /*in*/ "r" (&_argvec[0]) \
1787 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1788 ); \
1789 lval = (__typeof__(lval)) _res; \
1790 } while (0)
1791
1792 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1793 do { \
1794 volatile OrigFn _orig = (orig); \
1795 volatile unsigned long _argvec[4]; \
1796 volatile unsigned long _res; \
1797 _argvec[0] = (unsigned long)_orig.nraddr; \
1798 _argvec[1] = (unsigned long)arg1; \
1799 _argvec[2] = (unsigned long)arg2; \
1800 _argvec[3] = (unsigned long)arg3; \
1801 __asm__ volatile( \
1802 "mr 11,%1\n\t" \
1803 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1804 "lwz 4,8(11)\n\t" \
1805 "lwz 5,12(11)\n\t" \
1806 "lwz 11,0(11)\n\t" /* target->r11 */ \
1807 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1808 "mr %0,3" \
1809 : /*out*/ "=r" (_res) \
1810 : /*in*/ "r" (&_argvec[0]) \
1811 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1812 ); \
1813 lval = (__typeof__(lval)) _res; \
1814 } while (0)
1815
1816 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1817 do { \
1818 volatile OrigFn _orig = (orig); \
1819 volatile unsigned long _argvec[5]; \
1820 volatile unsigned long _res; \
1821 _argvec[0] = (unsigned long)_orig.nraddr; \
1822 _argvec[1] = (unsigned long)arg1; \
1823 _argvec[2] = (unsigned long)arg2; \
1824 _argvec[3] = (unsigned long)arg3; \
1825 _argvec[4] = (unsigned long)arg4; \
1826 __asm__ volatile( \
1827 "mr 11,%1\n\t" \
1828 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1829 "lwz 4,8(11)\n\t" \
1830 "lwz 5,12(11)\n\t" \
1831 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
1832 "lwz 11,0(11)\n\t" /* target->r11 */ \
1833 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1834 "mr %0,3" \
1835 : /*out*/ "=r" (_res) \
1836 : /*in*/ "r" (&_argvec[0]) \
1837 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1838 ); \
1839 lval = (__typeof__(lval)) _res; \
1840 } while (0)
1841
1842 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1843 do { \
1844 volatile OrigFn _orig = (orig); \
1845 volatile unsigned long _argvec[6]; \
1846 volatile unsigned long _res; \
1847 _argvec[0] = (unsigned long)_orig.nraddr; \
1848 _argvec[1] = (unsigned long)arg1; \
1849 _argvec[2] = (unsigned long)arg2; \
1850 _argvec[3] = (unsigned long)arg3; \
1851 _argvec[4] = (unsigned long)arg4; \
1852 _argvec[5] = (unsigned long)arg5; \
1853 __asm__ volatile( \
1854 "mr 11,%1\n\t" \
1855 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1856 "lwz 4,8(11)\n\t" \
1857 "lwz 5,12(11)\n\t" \
1858 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
1859 "lwz 7,20(11)\n\t" \
1860 "lwz 11,0(11)\n\t" /* target->r11 */ \
1861 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1862 "mr %0,3" \
1863 : /*out*/ "=r" (_res) \
1864 : /*in*/ "r" (&_argvec[0]) \
1865 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1866 ); \
1867 lval = (__typeof__(lval)) _res; \
1868 } while (0)
1869
1870 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1871 do { \
1872 volatile OrigFn _orig = (orig); \
1873 volatile unsigned long _argvec[7]; \
1874 volatile unsigned long _res; \
1875 _argvec[0] = (unsigned long)_orig.nraddr; \
1876 _argvec[1] = (unsigned long)arg1; \
1877 _argvec[2] = (unsigned long)arg2; \
1878 _argvec[3] = (unsigned long)arg3; \
1879 _argvec[4] = (unsigned long)arg4; \
1880 _argvec[5] = (unsigned long)arg5; \
1881 _argvec[6] = (unsigned long)arg6; \
1882 __asm__ volatile( \
1883 "mr 11,%1\n\t" \
1884 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1885 "lwz 4,8(11)\n\t" \
1886 "lwz 5,12(11)\n\t" \
1887 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
1888 "lwz 7,20(11)\n\t" \
1889 "lwz 8,24(11)\n\t" \
1890 "lwz 11,0(11)\n\t" /* target->r11 */ \
1891 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1892 "mr %0,3" \
1893 : /*out*/ "=r" (_res) \
1894 : /*in*/ "r" (&_argvec[0]) \
1895 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1896 ); \
1897 lval = (__typeof__(lval)) _res; \
1898 } while (0)
1899
1900 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1901 arg7) \
1902 do { \
1903 volatile OrigFn _orig = (orig); \
1904 volatile unsigned long _argvec[8]; \
1905 volatile unsigned long _res; \
1906 _argvec[0] = (unsigned long)_orig.nraddr; \
1907 _argvec[1] = (unsigned long)arg1; \
1908 _argvec[2] = (unsigned long)arg2; \
1909 _argvec[3] = (unsigned long)arg3; \
1910 _argvec[4] = (unsigned long)arg4; \
1911 _argvec[5] = (unsigned long)arg5; \
1912 _argvec[6] = (unsigned long)arg6; \
1913 _argvec[7] = (unsigned long)arg7; \
1914 __asm__ volatile( \
1915 "mr 11,%1\n\t" \
1916 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1917 "lwz 4,8(11)\n\t" \
1918 "lwz 5,12(11)\n\t" \
1919 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
1920 "lwz 7,20(11)\n\t" \
1921 "lwz 8,24(11)\n\t" \
1922 "lwz 9,28(11)\n\t" \
1923 "lwz 11,0(11)\n\t" /* target->r11 */ \
1924 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1925 "mr %0,3" \
1926 : /*out*/ "=r" (_res) \
1927 : /*in*/ "r" (&_argvec[0]) \
1928 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1929 ); \
1930 lval = (__typeof__(lval)) _res; \
1931 } while (0)
1932
1933 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1934 arg7,arg8) \
1935 do { \
1936 volatile OrigFn _orig = (orig); \
1937 volatile unsigned long _argvec[9]; \
1938 volatile unsigned long _res; \
1939 _argvec[0] = (unsigned long)_orig.nraddr; \
1940 _argvec[1] = (unsigned long)arg1; \
1941 _argvec[2] = (unsigned long)arg2; \
1942 _argvec[3] = (unsigned long)arg3; \
1943 _argvec[4] = (unsigned long)arg4; \
1944 _argvec[5] = (unsigned long)arg5; \
1945 _argvec[6] = (unsigned long)arg6; \
1946 _argvec[7] = (unsigned long)arg7; \
1947 _argvec[8] = (unsigned long)arg8; \
1948 __asm__ volatile( \
1949 "mr 11,%1\n\t" \
1950 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1951 "lwz 4,8(11)\n\t" \
1952 "lwz 5,12(11)\n\t" \
1953 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
1954 "lwz 7,20(11)\n\t" \
1955 "lwz 8,24(11)\n\t" \
1956 "lwz 9,28(11)\n\t" \
1957 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
1958 "lwz 11,0(11)\n\t" /* target->r11 */ \
1959 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
1960 "mr %0,3" \
1961 : /*out*/ "=r" (_res) \
1962 : /*in*/ "r" (&_argvec[0]) \
1963 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
1964 ); \
1965 lval = (__typeof__(lval)) _res; \
1966 } while (0)
1967
1968 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1969 arg7,arg8,arg9) \
1970 do { \
1971 volatile OrigFn _orig = (orig); \
1972 volatile unsigned long _argvec[10]; \
1973 volatile unsigned long _res; \
1974 _argvec[0] = (unsigned long)_orig.nraddr; \
1975 _argvec[1] = (unsigned long)arg1; \
1976 _argvec[2] = (unsigned long)arg2; \
1977 _argvec[3] = (unsigned long)arg3; \
1978 _argvec[4] = (unsigned long)arg4; \
1979 _argvec[5] = (unsigned long)arg5; \
1980 _argvec[6] = (unsigned long)arg6; \
1981 _argvec[7] = (unsigned long)arg7; \
1982 _argvec[8] = (unsigned long)arg8; \
1983 _argvec[9] = (unsigned long)arg9; \
1984 __asm__ volatile( \
1985 "mr 11,%1\n\t" \
1986 "addi 1,1,-16\n\t" \
1987 /* arg9 */ \
1988 "lwz 3,36(11)\n\t" \
1989 "stw 3,8(1)\n\t" \
1990 /* args1-8 */ \
1991 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
1992 "lwz 4,8(11)\n\t" \
1993 "lwz 5,12(11)\n\t" \
1994 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
1995 "lwz 7,20(11)\n\t" \
1996 "lwz 8,24(11)\n\t" \
1997 "lwz 9,28(11)\n\t" \
1998 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
1999 "lwz 11,0(11)\n\t" /* target->r11 */ \
2000 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2001 "addi 1,1,16\n\t" \
2002 "mr %0,3" \
2003 : /*out*/ "=r" (_res) \
2004 : /*in*/ "r" (&_argvec[0]) \
2005 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2006 ); \
2007 lval = (__typeof__(lval)) _res; \
2008 } while (0)
2009
2010 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2011 arg7,arg8,arg9,arg10) \
2012 do { \
2013 volatile OrigFn _orig = (orig); \
2014 volatile unsigned long _argvec[11]; \
2015 volatile unsigned long _res; \
2016 _argvec[0] = (unsigned long)_orig.nraddr; \
2017 _argvec[1] = (unsigned long)arg1; \
2018 _argvec[2] = (unsigned long)arg2; \
2019 _argvec[3] = (unsigned long)arg3; \
2020 _argvec[4] = (unsigned long)arg4; \
2021 _argvec[5] = (unsigned long)arg5; \
2022 _argvec[6] = (unsigned long)arg6; \
2023 _argvec[7] = (unsigned long)arg7; \
2024 _argvec[8] = (unsigned long)arg8; \
2025 _argvec[9] = (unsigned long)arg9; \
2026 _argvec[10] = (unsigned long)arg10; \
2027 __asm__ volatile( \
2028 "mr 11,%1\n\t" \
2029 "addi 1,1,-16\n\t" \
2030 /* arg10 */ \
2031 "lwz 3,40(11)\n\t" \
2032 "stw 3,12(1)\n\t" \
2033 /* arg9 */ \
2034 "lwz 3,36(11)\n\t" \
2035 "stw 3,8(1)\n\t" \
2036 /* args1-8 */ \
2037 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2038 "lwz 4,8(11)\n\t" \
2039 "lwz 5,12(11)\n\t" \
2040 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2041 "lwz 7,20(11)\n\t" \
2042 "lwz 8,24(11)\n\t" \
2043 "lwz 9,28(11)\n\t" \
2044 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2045 "lwz 11,0(11)\n\t" /* target->r11 */ \
2046 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2047 "addi 1,1,16\n\t" \
2048 "mr %0,3" \
2049 : /*out*/ "=r" (_res) \
2050 : /*in*/ "r" (&_argvec[0]) \
2051 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2052 ); \
2053 lval = (__typeof__(lval)) _res; \
2054 } while (0)
2055
2056 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2057 arg7,arg8,arg9,arg10,arg11) \
2058 do { \
2059 volatile OrigFn _orig = (orig); \
2060 volatile unsigned long _argvec[12]; \
2061 volatile unsigned long _res; \
2062 _argvec[0] = (unsigned long)_orig.nraddr; \
2063 _argvec[1] = (unsigned long)arg1; \
2064 _argvec[2] = (unsigned long)arg2; \
2065 _argvec[3] = (unsigned long)arg3; \
2066 _argvec[4] = (unsigned long)arg4; \
2067 _argvec[5] = (unsigned long)arg5; \
2068 _argvec[6] = (unsigned long)arg6; \
2069 _argvec[7] = (unsigned long)arg7; \
2070 _argvec[8] = (unsigned long)arg8; \
2071 _argvec[9] = (unsigned long)arg9; \
2072 _argvec[10] = (unsigned long)arg10; \
2073 _argvec[11] = (unsigned long)arg11; \
2074 __asm__ volatile( \
2075 "mr 11,%1\n\t" \
2076 "addi 1,1,-32\n\t" \
2077 /* arg11 */ \
2078 "lwz 3,44(11)\n\t" \
2079 "stw 3,16(1)\n\t" \
2080 /* arg10 */ \
2081 "lwz 3,40(11)\n\t" \
2082 "stw 3,12(1)\n\t" \
2083 /* arg9 */ \
2084 "lwz 3,36(11)\n\t" \
2085 "stw 3,8(1)\n\t" \
2086 /* args1-8 */ \
2087 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2088 "lwz 4,8(11)\n\t" \
2089 "lwz 5,12(11)\n\t" \
2090 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2091 "lwz 7,20(11)\n\t" \
2092 "lwz 8,24(11)\n\t" \
2093 "lwz 9,28(11)\n\t" \
2094 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2095 "lwz 11,0(11)\n\t" /* target->r11 */ \
2096 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2097 "addi 1,1,32\n\t" \
2098 "mr %0,3" \
2099 : /*out*/ "=r" (_res) \
2100 : /*in*/ "r" (&_argvec[0]) \
2101 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2102 ); \
2103 lval = (__typeof__(lval)) _res; \
2104 } while (0)
2105
2106 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2107 arg7,arg8,arg9,arg10,arg11,arg12) \
2108 do { \
2109 volatile OrigFn _orig = (orig); \
2110 volatile unsigned long _argvec[13]; \
2111 volatile unsigned long _res; \
2112 _argvec[0] = (unsigned long)_orig.nraddr; \
2113 _argvec[1] = (unsigned long)arg1; \
2114 _argvec[2] = (unsigned long)arg2; \
2115 _argvec[3] = (unsigned long)arg3; \
2116 _argvec[4] = (unsigned long)arg4; \
2117 _argvec[5] = (unsigned long)arg5; \
2118 _argvec[6] = (unsigned long)arg6; \
2119 _argvec[7] = (unsigned long)arg7; \
2120 _argvec[8] = (unsigned long)arg8; \
2121 _argvec[9] = (unsigned long)arg9; \
2122 _argvec[10] = (unsigned long)arg10; \
2123 _argvec[11] = (unsigned long)arg11; \
2124 _argvec[12] = (unsigned long)arg12; \
2125 __asm__ volatile( \
2126 "mr 11,%1\n\t" \
2127 "addi 1,1,-32\n\t" \
2128 /* arg12 */ \
2129 "lwz 3,48(11)\n\t" \
2130 "stw 3,20(1)\n\t" \
2131 /* arg11 */ \
2132 "lwz 3,44(11)\n\t" \
2133 "stw 3,16(1)\n\t" \
2134 /* arg10 */ \
2135 "lwz 3,40(11)\n\t" \
2136 "stw 3,12(1)\n\t" \
2137 /* arg9 */ \
2138 "lwz 3,36(11)\n\t" \
2139 "stw 3,8(1)\n\t" \
2140 /* args1-8 */ \
2141 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2142 "lwz 4,8(11)\n\t" \
2143 "lwz 5,12(11)\n\t" \
2144 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2145 "lwz 7,20(11)\n\t" \
2146 "lwz 8,24(11)\n\t" \
2147 "lwz 9,28(11)\n\t" \
2148 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2149 "lwz 11,0(11)\n\t" /* target->r11 */ \
2150 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2151 "addi 1,1,32\n\t" \
2152 "mr %0,3" \
2153 : /*out*/ "=r" (_res) \
2154 : /*in*/ "r" (&_argvec[0]) \
2155 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2156 ); \
2157 lval = (__typeof__(lval)) _res; \
2158 } while (0)
2159
2160 #endif /* PLAT_ppc32_linux */
2161
2162 /* ------------------------ ppc64-linux ------------------------ */
2163
2164 #if defined(PLAT_ppc64_linux)
2165
2166 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2167
2168 /* These regs are trashed by the hidden call. */
2169 #define __CALLER_SAVED_REGS \
2170 "lr", "ctr", "xer", \
2171 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2172 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2173 "r11", "r12", "r13"
2174
2175 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2176 long) == 8. */
2177
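/* Editor's note on the argvec layout used below: _argvec[1] holds the
   target's TOC pointer (r2) and _argvec[2] its entry address, with the
   arguments in _argvec[3] onwards.  The asm blocks are handed
   &_argvec[2], so the target is at 0(11), the arguments at 8(11),
   16(11), ..., the target's TOC at -8(11), and the caller's r2 is
   parked in _argvec[0], i.e. at -16(11), for the duration of the
   call. */
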
2178 #define CALL_FN_W_v(lval, orig) \
2179 do { \
2180 volatile OrigFn _orig = (orig); \
2181 volatile unsigned long _argvec[3+0]; \
2182 volatile unsigned long _res; \
2183 /* _argvec[0] holds current r2 across the call */ \
2184 _argvec[1] = (unsigned long)_orig.r2; \
2185 _argvec[2] = (unsigned long)_orig.nraddr; \
2186 __asm__ volatile( \
2187 "mr 11,%1\n\t" \
2188 "std 2,-16(11)\n\t" /* save tocptr */ \
2189 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2190 "ld 11, 0(11)\n\t" /* target->r11 */ \
2191 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2192 "mr 11,%1\n\t" \
2193 "mr %0,3\n\t" \
2194 "ld 2,-16(11)" /* restore tocptr */ \
2195 : /*out*/ "=r" (_res) \
2196 : /*in*/ "r" (&_argvec[2]) \
2197 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2198 ); \
2199 lval = (__typeof__(lval)) _res; \
2200 } while (0)
2201
2202 #define CALL_FN_W_W(lval, orig, arg1) \
2203 do { \
2204 volatile OrigFn _orig = (orig); \
2205 volatile unsigned long _argvec[3+1]; \
2206 volatile unsigned long _res; \
2207 /* _argvec[0] holds current r2 across the call */ \
2208 _argvec[1] = (unsigned long)_orig.r2; \
2209 _argvec[2] = (unsigned long)_orig.nraddr; \
2210 _argvec[2+1] = (unsigned long)arg1; \
2211 __asm__ volatile( \
2212 "mr 11,%1\n\t" \
2213 "std 2,-16(11)\n\t" /* save tocptr */ \
2214 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2215 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2216 "ld 11, 0(11)\n\t" /* target->r11 */ \
2217 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2218 "mr 11,%1\n\t" \
2219 "mr %0,3\n\t" \
2220 "ld 2,-16(11)" /* restore tocptr */ \
2221 : /*out*/ "=r" (_res) \
2222 : /*in*/ "r" (&_argvec[2]) \
2223 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2224 ); \
2225 lval = (__typeof__(lval)) _res; \
2226 } while (0)
2227
2228 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2229 do { \
2230 volatile OrigFn _orig = (orig); \
2231 volatile unsigned long _argvec[3+2]; \
2232 volatile unsigned long _res; \
2233 /* _argvec[0] holds current r2 across the call */ \
2234 _argvec[1] = (unsigned long)_orig.r2; \
2235 _argvec[2] = (unsigned long)_orig.nraddr; \
2236 _argvec[2+1] = (unsigned long)arg1; \
2237 _argvec[2+2] = (unsigned long)arg2; \
2238 __asm__ volatile( \
2239 "mr 11,%1\n\t" \
2240 "std 2,-16(11)\n\t" /* save tocptr */ \
2241 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2242 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2243 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2244 "ld 11, 0(11)\n\t" /* target->r11 */ \
2245 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2246 "mr 11,%1\n\t" \
2247 "mr %0,3\n\t" \
2248 "ld 2,-16(11)" /* restore tocptr */ \
2249 : /*out*/ "=r" (_res) \
2250 : /*in*/ "r" (&_argvec[2]) \
2251 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2252 ); \
2253 lval = (__typeof__(lval)) _res; \
2254 } while (0)
2255
2256 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2257 do { \
2258 volatile OrigFn _orig = (orig); \
2259 volatile unsigned long _argvec[3+3]; \
2260 volatile unsigned long _res; \
2261 /* _argvec[0] holds current r2 across the call */ \
2262 _argvec[1] = (unsigned long)_orig.r2; \
2263 _argvec[2] = (unsigned long)_orig.nraddr; \
2264 _argvec[2+1] = (unsigned long)arg1; \
2265 _argvec[2+2] = (unsigned long)arg2; \
2266 _argvec[2+3] = (unsigned long)arg3; \
2267 __asm__ volatile( \
2268 "mr 11,%1\n\t" \
2269 "std 2,-16(11)\n\t" /* save tocptr */ \
2270 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2271 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2272 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2273 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2274 "ld 11, 0(11)\n\t" /* target->r11 */ \
2275 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2276 "mr 11,%1\n\t" \
2277 "mr %0,3\n\t" \
2278 "ld 2,-16(11)" /* restore tocptr */ \
2279 : /*out*/ "=r" (_res) \
2280 : /*in*/ "r" (&_argvec[2]) \
2281 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2282 ); \
2283 lval = (__typeof__(lval)) _res; \
2284 } while (0)
2285
2286 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2287 do { \
2288 volatile OrigFn _orig = (orig); \
2289 volatile unsigned long _argvec[3+4]; \
2290 volatile unsigned long _res; \
2291 /* _argvec[0] holds current r2 across the call */ \
2292 _argvec[1] = (unsigned long)_orig.r2; \
2293 _argvec[2] = (unsigned long)_orig.nraddr; \
2294 _argvec[2+1] = (unsigned long)arg1; \
2295 _argvec[2+2] = (unsigned long)arg2; \
2296 _argvec[2+3] = (unsigned long)arg3; \
2297 _argvec[2+4] = (unsigned long)arg4; \
2298 __asm__ volatile( \
2299 "mr 11,%1\n\t" \
2300 "std 2,-16(11)\n\t" /* save tocptr */ \
2301 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2302 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2303 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2304 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2305 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2306 "ld 11, 0(11)\n\t" /* target->r11 */ \
2307 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2308 "mr 11,%1\n\t" \
2309 "mr %0,3\n\t" \
2310 "ld 2,-16(11)" /* restore tocptr */ \
2311 : /*out*/ "=r" (_res) \
2312 : /*in*/ "r" (&_argvec[2]) \
2313 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2314 ); \
2315 lval = (__typeof__(lval)) _res; \
2316 } while (0)
2317
2318 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2319 do { \
2320 volatile OrigFn _orig = (orig); \
2321 volatile unsigned long _argvec[3+5]; \
2322 volatile unsigned long _res; \
2323 /* _argvec[0] holds current r2 across the call */ \
2324 _argvec[1] = (unsigned long)_orig.r2; \
2325 _argvec[2] = (unsigned long)_orig.nraddr; \
2326 _argvec[2+1] = (unsigned long)arg1; \
2327 _argvec[2+2] = (unsigned long)arg2; \
2328 _argvec[2+3] = (unsigned long)arg3; \
2329 _argvec[2+4] = (unsigned long)arg4; \
2330 _argvec[2+5] = (unsigned long)arg5; \
2331 __asm__ volatile( \
2332 "mr 11,%1\n\t" \
2333 "std 2,-16(11)\n\t" /* save tocptr */ \
2334 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2335 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2336 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2337 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2338 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2339 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2340 "ld 11, 0(11)\n\t" /* target->r11 */ \
2341 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2342 "mr 11,%1\n\t" \
2343 "mr %0,3\n\t" \
2344 "ld 2,-16(11)" /* restore tocptr */ \
2345 : /*out*/ "=r" (_res) \
2346 : /*in*/ "r" (&_argvec[2]) \
2347 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2348 ); \
2349 lval = (__typeof__(lval)) _res; \
2350 } while (0)
2351
2352 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2353 do { \
2354 volatile OrigFn _orig = (orig); \
2355 volatile unsigned long _argvec[3+6]; \
2356 volatile unsigned long _res; \
2357 /* _argvec[0] holds current r2 across the call */ \
2358 _argvec[1] = (unsigned long)_orig.r2; \
2359 _argvec[2] = (unsigned long)_orig.nraddr; \
2360 _argvec[2+1] = (unsigned long)arg1; \
2361 _argvec[2+2] = (unsigned long)arg2; \
2362 _argvec[2+3] = (unsigned long)arg3; \
2363 _argvec[2+4] = (unsigned long)arg4; \
2364 _argvec[2+5] = (unsigned long)arg5; \
2365 _argvec[2+6] = (unsigned long)arg6; \
2366 __asm__ volatile( \
2367 "mr 11,%1\n\t" \
2368 "std 2,-16(11)\n\t" /* save tocptr */ \
2369 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2370 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2371 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2372 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2373 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2374 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2375 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2376 "ld 11, 0(11)\n\t" /* target->r11 */ \
2377 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2378 "mr 11,%1\n\t" \
2379 "mr %0,3\n\t" \
2380 "ld 2,-16(11)" /* restore tocptr */ \
2381 : /*out*/ "=r" (_res) \
2382 : /*in*/ "r" (&_argvec[2]) \
2383 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2384 ); \
2385 lval = (__typeof__(lval)) _res; \
2386 } while (0)
2387
2388 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2389 arg7) \
2390 do { \
2391 volatile OrigFn _orig = (orig); \
2392 volatile unsigned long _argvec[3+7]; \
2393 volatile unsigned long _res; \
2394 /* _argvec[0] holds current r2 across the call */ \
2395 _argvec[1] = (unsigned long)_orig.r2; \
2396 _argvec[2] = (unsigned long)_orig.nraddr; \
2397 _argvec[2+1] = (unsigned long)arg1; \
2398 _argvec[2+2] = (unsigned long)arg2; \
2399 _argvec[2+3] = (unsigned long)arg3; \
2400 _argvec[2+4] = (unsigned long)arg4; \
2401 _argvec[2+5] = (unsigned long)arg5; \
2402 _argvec[2+6] = (unsigned long)arg6; \
2403 _argvec[2+7] = (unsigned long)arg7; \
2404 __asm__ volatile( \
2405 "mr 11,%1\n\t" \
2406 "std 2,-16(11)\n\t" /* save tocptr */ \
2407 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2408 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2409 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2410 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2411 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2412 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2413 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2414 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2415 "ld 11, 0(11)\n\t" /* target->r11 */ \
2416 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2417 "mr 11,%1\n\t" \
2418 "mr %0,3\n\t" \
2419 "ld 2,-16(11)" /* restore tocptr */ \
2420 : /*out*/ "=r" (_res) \
2421 : /*in*/ "r" (&_argvec[2]) \
2422 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2423 ); \
2424 lval = (__typeof__(lval)) _res; \
2425 } while (0)
2426
2427 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2428 arg7,arg8) \
2429 do { \
2430 volatile OrigFn _orig = (orig); \
2431 volatile unsigned long _argvec[3+8]; \
2432 volatile unsigned long _res; \
2433 /* _argvec[0] holds current r2 across the call */ \
2434 _argvec[1] = (unsigned long)_orig.r2; \
2435 _argvec[2] = (unsigned long)_orig.nraddr; \
2436 _argvec[2+1] = (unsigned long)arg1; \
2437 _argvec[2+2] = (unsigned long)arg2; \
2438 _argvec[2+3] = (unsigned long)arg3; \
2439 _argvec[2+4] = (unsigned long)arg4; \
2440 _argvec[2+5] = (unsigned long)arg5; \
2441 _argvec[2+6] = (unsigned long)arg6; \
2442 _argvec[2+7] = (unsigned long)arg7; \
2443 _argvec[2+8] = (unsigned long)arg8; \
2444 __asm__ volatile( \
2445 "mr 11,%1\n\t" \
2446 "std 2,-16(11)\n\t" /* save tocptr */ \
2447 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2448 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2449 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2450 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2451 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2452 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2453 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2454 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2455 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2456 "ld 11, 0(11)\n\t" /* target->r11 */ \
2457 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2458 "mr 11,%1\n\t" \
2459 "mr %0,3\n\t" \
2460 "ld 2,-16(11)" /* restore tocptr */ \
2461 : /*out*/ "=r" (_res) \
2462 : /*in*/ "r" (&_argvec[2]) \
2463 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2464 ); \
2465 lval = (__typeof__(lval)) _res; \
2466 } while (0)
2467
2468 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2469 arg7,arg8,arg9) \
2470 do { \
2471 volatile OrigFn _orig = (orig); \
2472 volatile unsigned long _argvec[3+9]; \
2473 volatile unsigned long _res; \
2474 /* _argvec[0] holds current r2 across the call */ \
2475 _argvec[1] = (unsigned long)_orig.r2; \
2476 _argvec[2] = (unsigned long)_orig.nraddr; \
2477 _argvec[2+1] = (unsigned long)arg1; \
2478 _argvec[2+2] = (unsigned long)arg2; \
2479 _argvec[2+3] = (unsigned long)arg3; \
2480 _argvec[2+4] = (unsigned long)arg4; \
2481 _argvec[2+5] = (unsigned long)arg5; \
2482 _argvec[2+6] = (unsigned long)arg6; \
2483 _argvec[2+7] = (unsigned long)arg7; \
2484 _argvec[2+8] = (unsigned long)arg8; \
2485 _argvec[2+9] = (unsigned long)arg9; \
2486 __asm__ volatile( \
2487 "mr 11,%1\n\t" \
2488 "std 2,-16(11)\n\t" /* save tocptr */ \
2489 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2490 "addi 1,1,-128\n\t" /* expand stack frame */ \
2491 /* arg9 */ \
2492 "ld 3,72(11)\n\t" \
2493 "std 3,112(1)\n\t" \
2494 /* args1-8 */ \
2495 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2496 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2497 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2498 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2499 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2500 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2501 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2502 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2503 "ld 11, 0(11)\n\t" /* target->r11 */ \
2504 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2505 "mr 11,%1\n\t" \
2506 "mr %0,3\n\t" \
2507 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2508 "addi 1,1,128" /* restore frame */ \
2509 : /*out*/ "=r" (_res) \
2510 : /*in*/ "r" (&_argvec[2]) \
2511 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2512 ); \
2513 lval = (__typeof__(lval)) _res; \
2514 } while (0)
2515
2516 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2517 arg7,arg8,arg9,arg10) \
2518 do { \
2519 volatile OrigFn _orig = (orig); \
2520 volatile unsigned long _argvec[3+10]; \
2521 volatile unsigned long _res; \
2522 /* _argvec[0] holds current r2 across the call */ \
2523 _argvec[1] = (unsigned long)_orig.r2; \
2524 _argvec[2] = (unsigned long)_orig.nraddr; \
2525 _argvec[2+1] = (unsigned long)arg1; \
2526 _argvec[2+2] = (unsigned long)arg2; \
2527 _argvec[2+3] = (unsigned long)arg3; \
2528 _argvec[2+4] = (unsigned long)arg4; \
2529 _argvec[2+5] = (unsigned long)arg5; \
2530 _argvec[2+6] = (unsigned long)arg6; \
2531 _argvec[2+7] = (unsigned long)arg7; \
2532 _argvec[2+8] = (unsigned long)arg8; \
2533 _argvec[2+9] = (unsigned long)arg9; \
2534 _argvec[2+10] = (unsigned long)arg10; \
2535 __asm__ volatile( \
2536 "mr 11,%1\n\t" \
2537 "std 2,-16(11)\n\t" /* save tocptr */ \
2538 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2539 "addi 1,1,-128\n\t" /* expand stack frame */ \
2540 /* arg10 */ \
2541 "ld 3,80(11)\n\t" \
2542 "std 3,120(1)\n\t" \
2543 /* arg9 */ \
2544 "ld 3,72(11)\n\t" \
2545 "std 3,112(1)\n\t" \
2546 /* args1-8 */ \
2547 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2548 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2549 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2550 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2551 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2552 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2553 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2554 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2555 "ld 11, 0(11)\n\t" /* target->r11 */ \
2556 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2557 "mr 11,%1\n\t" \
2558 "mr %0,3\n\t" \
2559 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2560 "addi 1,1,128" /* restore frame */ \
2561 : /*out*/ "=r" (_res) \
2562 : /*in*/ "r" (&_argvec[2]) \
2563 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2564 ); \
2565 lval = (__typeof__(lval)) _res; \
2566 } while (0)
2567
2568 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2569 arg7,arg8,arg9,arg10,arg11) \
2570 do { \
2571 volatile OrigFn _orig = (orig); \
2572 volatile unsigned long _argvec[3+11]; \
2573 volatile unsigned long _res; \
2574 /* _argvec[0] holds current r2 across the call */ \
2575 _argvec[1] = (unsigned long)_orig.r2; \
2576 _argvec[2] = (unsigned long)_orig.nraddr; \
2577 _argvec[2+1] = (unsigned long)arg1; \
2578 _argvec[2+2] = (unsigned long)arg2; \
2579 _argvec[2+3] = (unsigned long)arg3; \
2580 _argvec[2+4] = (unsigned long)arg4; \
2581 _argvec[2+5] = (unsigned long)arg5; \
2582 _argvec[2+6] = (unsigned long)arg6; \
2583 _argvec[2+7] = (unsigned long)arg7; \
2584 _argvec[2+8] = (unsigned long)arg8; \
2585 _argvec[2+9] = (unsigned long)arg9; \
2586 _argvec[2+10] = (unsigned long)arg10; \
2587 _argvec[2+11] = (unsigned long)arg11; \
2588 __asm__ volatile( \
2589 "mr 11,%1\n\t" \
2590 "std 2,-16(11)\n\t" /* save tocptr */ \
2591 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2592 "addi 1,1,-144\n\t" /* expand stack frame */ \
2593 /* arg11 */ \
2594 "ld 3,88(11)\n\t" \
2595 "std 3,128(1)\n\t" \
2596 /* arg10 */ \
2597 "ld 3,80(11)\n\t" \
2598 "std 3,120(1)\n\t" \
2599 /* arg9 */ \
2600 "ld 3,72(11)\n\t" \
2601 "std 3,112(1)\n\t" \
2602 /* args1-8 */ \
2603 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2604 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2605 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2606 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2607 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2608 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2609 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2610 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2611 "ld 11, 0(11)\n\t" /* target->r11 */ \
2612 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2613 "mr 11,%1\n\t" \
2614 "mr %0,3\n\t" \
2615 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2616 "addi 1,1,144" /* restore frame */ \
2617 : /*out*/ "=r" (_res) \
2618 : /*in*/ "r" (&_argvec[2]) \
2619 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2620 ); \
2621 lval = (__typeof__(lval)) _res; \
2622 } while (0)
2623
2624 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2625 arg7,arg8,arg9,arg10,arg11,arg12) \
2626 do { \
2627 volatile OrigFn _orig = (orig); \
2628 volatile unsigned long _argvec[3+12]; \
2629 volatile unsigned long _res; \
2630 /* _argvec[0] holds current r2 across the call */ \
2631 _argvec[1] = (unsigned long)_orig.r2; \
2632 _argvec[2] = (unsigned long)_orig.nraddr; \
2633 _argvec[2+1] = (unsigned long)arg1; \
2634 _argvec[2+2] = (unsigned long)arg2; \
2635 _argvec[2+3] = (unsigned long)arg3; \
2636 _argvec[2+4] = (unsigned long)arg4; \
2637 _argvec[2+5] = (unsigned long)arg5; \
2638 _argvec[2+6] = (unsigned long)arg6; \
2639 _argvec[2+7] = (unsigned long)arg7; \
2640 _argvec[2+8] = (unsigned long)arg8; \
2641 _argvec[2+9] = (unsigned long)arg9; \
2642 _argvec[2+10] = (unsigned long)arg10; \
2643 _argvec[2+11] = (unsigned long)arg11; \
2644 _argvec[2+12] = (unsigned long)arg12; \
2645 __asm__ volatile( \
2646 "mr 11,%1\n\t" \
2647 "std 2,-16(11)\n\t" /* save tocptr */ \
2648 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2649 "addi 1,1,-144\n\t" /* expand stack frame */ \
2650 /* arg12 */ \
2651 "ld 3,96(11)\n\t" \
2652 "std 3,136(1)\n\t" \
2653 /* arg11 */ \
2654 "ld 3,88(11)\n\t" \
2655 "std 3,128(1)\n\t" \
2656 /* arg10 */ \
2657 "ld 3,80(11)\n\t" \
2658 "std 3,120(1)\n\t" \
2659 /* arg9 */ \
2660 "ld 3,72(11)\n\t" \
2661 "std 3,112(1)\n\t" \
2662 /* args1-8 */ \
2663 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2664 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2665 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2666 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2667 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2668 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2669 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2670 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2671 "ld 11, 0(11)\n\t" /* target->r11 */ \
2672 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2673 "mr 11,%1\n\t" \
2674 "mr %0,3\n\t" \
2675 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2676 "addi 1,1,144" /* restore frame */ \
2677 : /*out*/ "=r" (_res) \
2678 : /*in*/ "r" (&_argvec[2]) \
2679 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2680 ); \
2681 lval = (__typeof__(lval)) _res; \
2682 } while (0)
2683
2684 #endif /* PLAT_ppc64_linux */
2685
2686 /* ------------------------- arm-linux ------------------------- */
2687
2688 #if defined(PLAT_arm_linux)
2689
2690 /* These regs are trashed by the hidden call. */
2691 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3", "r4", "r14"
2692
2693 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
2694 long) == 4. */
2695
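/* Editor's note on the argvec layout used below: _argvec[0] holds the
   target address (loaded into r4) and _argvec[N] holds argN, i.e. argN
   sits at byte offset 4*N from the pointer handed to the asm block.
   Arguments 1-4 go in r0-r3 as the AAPCS requires; any remaining
   arguments are pushed onto the stack, lowest-numbered at the lowest
   address, and popped again by the "add sp, sp, #..." after the
   call. */
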
2696 #define CALL_FN_W_v(lval, orig) \
2697 do { \
2698 volatile OrigFn _orig = (orig); \
2699 volatile unsigned long _argvec[1]; \
2700 volatile unsigned long _res; \
2701 _argvec[0] = (unsigned long)_orig.nraddr; \
2702 __asm__ volatile( \
2703 "ldr r4, [%1] \n\t" /* target->r4 */ \
2704 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2705 "mov %0, r0\n" \
2706 : /*out*/ "=r" (_res) \
2707 : /*in*/ "0" (&_argvec[0]) \
2708 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2709 ); \
2710 lval = (__typeof__(lval)) _res; \
2711 } while (0)
2712
2713 #define CALL_FN_W_W(lval, orig, arg1) \
2714 do { \
2715 volatile OrigFn _orig = (orig); \
2716 volatile unsigned long _argvec[2]; \
2717 volatile unsigned long _res; \
2718 _argvec[0] = (unsigned long)_orig.nraddr; \
2719 _argvec[1] = (unsigned long)(arg1); \
2720 __asm__ volatile( \
2721 "ldr r0, [%1, #4] \n\t" \
2722 "ldr r4, [%1] \n\t" /* target->r4 */ \
2723 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2724 "mov %0, r0\n" \
2725 : /*out*/ "=r" (_res) \
2726 : /*in*/ "0" (&_argvec[0]) \
2727 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2728 ); \
2729 lval = (__typeof__(lval)) _res; \
2730 } while (0)
2731
2732 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2733 do { \
2734 volatile OrigFn _orig = (orig); \
2735 volatile unsigned long _argvec[3]; \
2736 volatile unsigned long _res; \
2737 _argvec[0] = (unsigned long)_orig.nraddr; \
2738 _argvec[1] = (unsigned long)(arg1); \
2739 _argvec[2] = (unsigned long)(arg2); \
2740 __asm__ volatile( \
2741 "ldr r0, [%1, #4] \n\t" \
2742 "ldr r1, [%1, #8] \n\t" \
2743 "ldr r4, [%1] \n\t" /* target->r4 */ \
2744 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2745 "mov %0, r0\n" \
2746 : /*out*/ "=r" (_res) \
2747 : /*in*/ "0" (&_argvec[0]) \
2748 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2749 ); \
2750 lval = (__typeof__(lval)) _res; \
2751 } while (0)
2752
2753 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2754 do { \
2755 volatile OrigFn _orig = (orig); \
2756 volatile unsigned long _argvec[4]; \
2757 volatile unsigned long _res; \
2758 _argvec[0] = (unsigned long)_orig.nraddr; \
2759 _argvec[1] = (unsigned long)(arg1); \
2760 _argvec[2] = (unsigned long)(arg2); \
2761 _argvec[3] = (unsigned long)(arg3); \
2762 __asm__ volatile( \
2763 "ldr r0, [%1, #4] \n\t" \
2764 "ldr r1, [%1, #8] \n\t" \
2765 "ldr r2, [%1, #12] \n\t" \
2766 "ldr r4, [%1] \n\t" /* target->r4 */ \
2767 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2768 "mov %0, r0\n" \
2769 : /*out*/ "=r" (_res) \
2770 : /*in*/ "0" (&_argvec[0]) \
2771 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2772 ); \
2773 lval = (__typeof__(lval)) _res; \
2774 } while (0)
2775
2776 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2777 do { \
2778 volatile OrigFn _orig = (orig); \
2779 volatile unsigned long _argvec[5]; \
2780 volatile unsigned long _res; \
2781 _argvec[0] = (unsigned long)_orig.nraddr; \
2782 _argvec[1] = (unsigned long)(arg1); \
2783 _argvec[2] = (unsigned long)(arg2); \
2784 _argvec[3] = (unsigned long)(arg3); \
2785 _argvec[4] = (unsigned long)(arg4); \
2786 __asm__ volatile( \
2787 "ldr r0, [%1, #4] \n\t" \
2788 "ldr r1, [%1, #8] \n\t" \
2789 "ldr r2, [%1, #12] \n\t" \
2790 "ldr r3, [%1, #16] \n\t" \
2791 "ldr r4, [%1] \n\t" /* target->r4 */ \
2792 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2793 "mov %0, r0" \
2794 : /*out*/ "=r" (_res) \
2795 : /*in*/ "0" (&_argvec[0]) \
2796 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2797 ); \
2798 lval = (__typeof__(lval)) _res; \
2799 } while (0)
2800
2801 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2802 do { \
2803 volatile OrigFn _orig = (orig); \
2804 volatile unsigned long _argvec[6]; \
2805 volatile unsigned long _res; \
2806 _argvec[0] = (unsigned long)_orig.nraddr; \
2807 _argvec[1] = (unsigned long)(arg1); \
2808 _argvec[2] = (unsigned long)(arg2); \
2809 _argvec[3] = (unsigned long)(arg3); \
2810 _argvec[4] = (unsigned long)(arg4); \
2811 _argvec[5] = (unsigned long)(arg5); \
2812 __asm__ volatile( \
2813 "ldr r0, [%1, #20] \n\t" \
2814 "push {r0} \n\t" \
2815 "ldr r0, [%1, #4] \n\t" \
2816 "ldr r1, [%1, #8] \n\t" \
2817 "ldr r2, [%1, #12] \n\t" \
2818 "ldr r3, [%1, #16] \n\t" \
2819 "ldr r4, [%1] \n\t" /* target->r4 */ \
2820 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2821 "add sp, sp, #4 \n\t" \
2822 "mov %0, r0" \
2823 : /*out*/ "=r" (_res) \
2824 : /*in*/ "0" (&_argvec[0]) \
2825 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2826 ); \
2827 lval = (__typeof__(lval)) _res; \
2828 } while (0)
2829
2830 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2831 do { \
2832 volatile OrigFn _orig = (orig); \
2833 volatile unsigned long _argvec[7]; \
2834 volatile unsigned long _res; \
2835 _argvec[0] = (unsigned long)_orig.nraddr; \
2836 _argvec[1] = (unsigned long)(arg1); \
2837 _argvec[2] = (unsigned long)(arg2); \
2838 _argvec[3] = (unsigned long)(arg3); \
2839 _argvec[4] = (unsigned long)(arg4); \
2840 _argvec[5] = (unsigned long)(arg5); \
2841 _argvec[6] = (unsigned long)(arg6); \
2842 __asm__ volatile( \
2843 "ldr r0, [%1, #20] \n\t" \
2844 "ldr r1, [%1, #24] \n\t" \
2845 "push {r0, r1} \n\t" \
2846 "ldr r0, [%1, #4] \n\t" \
2847 "ldr r1, [%1, #8] \n\t" \
2848 "ldr r2, [%1, #12] \n\t" \
2849 "ldr r3, [%1, #16] \n\t" \
2850 "ldr r4, [%1] \n\t" /* target->r4 */ \
2851 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2852 "add sp, sp, #8 \n\t" \
2853 "mov %0, r0" \
2854 : /*out*/ "=r" (_res) \
2855 : /*in*/ "0" (&_argvec[0]) \
2856 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2857 ); \
2858 lval = (__typeof__(lval)) _res; \
2859 } while (0)
2860
2861 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2862 arg7) \
2863 do { \
2864 volatile OrigFn _orig = (orig); \
2865 volatile unsigned long _argvec[8]; \
2866 volatile unsigned long _res; \
2867 _argvec[0] = (unsigned long)_orig.nraddr; \
2868 _argvec[1] = (unsigned long)(arg1); \
2869 _argvec[2] = (unsigned long)(arg2); \
2870 _argvec[3] = (unsigned long)(arg3); \
2871 _argvec[4] = (unsigned long)(arg4); \
2872 _argvec[5] = (unsigned long)(arg5); \
2873 _argvec[6] = (unsigned long)(arg6); \
2874 _argvec[7] = (unsigned long)(arg7); \
2875 __asm__ volatile( \
2876 "ldr r0, [%1, #20] \n\t" \
2877 "ldr r1, [%1, #24] \n\t" \
2878 "ldr r2, [%1, #28] \n\t" \
2879 "push {r0, r1, r2} \n\t" \
2880 "ldr r0, [%1, #4] \n\t" \
2881 "ldr r1, [%1, #8] \n\t" \
2882 "ldr r2, [%1, #12] \n\t" \
2883 "ldr r3, [%1, #16] \n\t" \
2884 "ldr r4, [%1] \n\t" /* target->r4 */ \
2885 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2886 "add sp, sp, #12 \n\t" \
2887 "mov %0, r0" \
2888 : /*out*/ "=r" (_res) \
2889 : /*in*/ "0" (&_argvec[0]) \
2890 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2891 ); \
2892 lval = (__typeof__(lval)) _res; \
2893 } while (0)
2894
2895 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2896 arg7,arg8) \
2897 do { \
2898 volatile OrigFn _orig = (orig); \
2899 volatile unsigned long _argvec[9]; \
2900 volatile unsigned long _res; \
2901 _argvec[0] = (unsigned long)_orig.nraddr; \
2902 _argvec[1] = (unsigned long)(arg1); \
2903 _argvec[2] = (unsigned long)(arg2); \
2904 _argvec[3] = (unsigned long)(arg3); \
2905 _argvec[4] = (unsigned long)(arg4); \
2906 _argvec[5] = (unsigned long)(arg5); \
2907 _argvec[6] = (unsigned long)(arg6); \
2908 _argvec[7] = (unsigned long)(arg7); \
2909 _argvec[8] = (unsigned long)(arg8); \
2910 __asm__ volatile( \
2911 "ldr r0, [%1, #20] \n\t" \
2912 "ldr r1, [%1, #24] \n\t" \
2913 "ldr r2, [%1, #28] \n\t" \
2914 "ldr r3, [%1, #32] \n\t" \
2915 "push {r0, r1, r2, r3} \n\t" \
2916 "ldr r0, [%1, #4] \n\t" \
2917 "ldr r1, [%1, #8] \n\t" \
2918 "ldr r2, [%1, #12] \n\t" \
2919 "ldr r3, [%1, #16] \n\t" \
2920 "ldr r4, [%1] \n\t" /* target->r4 */ \
2921 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2922 "add sp, sp, #16 \n\t" \
2923 "mov %0, r0" \
2924 : /*out*/ "=r" (_res) \
2925 : /*in*/ "0" (&_argvec[0]) \
2926 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2927 ); \
2928 lval = (__typeof__(lval)) _res; \
2929 } while (0)
2930
2931 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2932 arg7,arg8,arg9) \
2933 do { \
2934 volatile OrigFn _orig = (orig); \
2935 volatile unsigned long _argvec[10]; \
2936 volatile unsigned long _res; \
2937 _argvec[0] = (unsigned long)_orig.nraddr; \
2938 _argvec[1] = (unsigned long)(arg1); \
2939 _argvec[2] = (unsigned long)(arg2); \
2940 _argvec[3] = (unsigned long)(arg3); \
2941 _argvec[4] = (unsigned long)(arg4); \
2942 _argvec[5] = (unsigned long)(arg5); \
2943 _argvec[6] = (unsigned long)(arg6); \
2944 _argvec[7] = (unsigned long)(arg7); \
2945 _argvec[8] = (unsigned long)(arg8); \
2946 _argvec[9] = (unsigned long)(arg9); \
2947 __asm__ volatile( \
2948 "ldr r0, [%1, #20] \n\t" \
2949 "ldr r1, [%1, #24] \n\t" \
2950 "ldr r2, [%1, #28] \n\t" \
2951 "ldr r3, [%1, #32] \n\t" \
2952 "ldr r4, [%1, #36] \n\t" \
2953 "push {r0, r1, r2, r3, r4} \n\t" \
2954 "ldr r0, [%1, #4] \n\t" \
2955 "ldr r1, [%1, #8] \n\t" \
2956 "ldr r2, [%1, #12] \n\t" \
2957 "ldr r3, [%1, #16] \n\t" \
2958 "ldr r4, [%1] \n\t" /* target->r4 */ \
2959 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2960 "add sp, sp, #20 \n\t" \
2961 "mov %0, r0" \
2962 : /*out*/ "=r" (_res) \
2963 : /*in*/ "0" (&_argvec[0]) \
2964 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
2965 ); \
2966 lval = (__typeof__(lval)) _res; \
2967 } while (0)
2968
2969 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2970 arg7,arg8,arg9,arg10) \
2971 do { \
2972 volatile OrigFn _orig = (orig); \
2973 volatile unsigned long _argvec[11]; \
2974 volatile unsigned long _res; \
2975 _argvec[0] = (unsigned long)_orig.nraddr; \
2976 _argvec[1] = (unsigned long)(arg1); \
2977 _argvec[2] = (unsigned long)(arg2); \
2978 _argvec[3] = (unsigned long)(arg3); \
2979 _argvec[4] = (unsigned long)(arg4); \
2980 _argvec[5] = (unsigned long)(arg5); \
2981 _argvec[6] = (unsigned long)(arg6); \
2982 _argvec[7] = (unsigned long)(arg7); \
2983 _argvec[8] = (unsigned long)(arg8); \
2984 _argvec[9] = (unsigned long)(arg9); \
2985 _argvec[10] = (unsigned long)(arg10); \
2986 __asm__ volatile( \
2987 "ldr r0, [%1, #40] \n\t" \
2988 "push {r0} \n\t" \
2989 "ldr r0, [%1, #20] \n\t" \
2990 "ldr r1, [%1, #24] \n\t" \
2991 "ldr r2, [%1, #28] \n\t" \
2992 "ldr r3, [%1, #32] \n\t" \
2993 "ldr r4, [%1, #36] \n\t" \
2994 "push {r0, r1, r2, r3, r4} \n\t" \
2995 "ldr r0, [%1, #4] \n\t" \
2996 "ldr r1, [%1, #8] \n\t" \
2997 "ldr r2, [%1, #12] \n\t" \
2998 "ldr r3, [%1, #16] \n\t" \
2999 "ldr r4, [%1] \n\t" /* target->r4 */ \
3000 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3001 "add sp, sp, #24 \n\t" \
3002 "mov %0, r0" \
3003 : /*out*/ "=r" (_res) \
3004 : /*in*/ "0" (&_argvec[0]) \
3005 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
3006 ); \
3007 lval = (__typeof__(lval)) _res; \
3008 } while (0)
3009
3010 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3011 arg6,arg7,arg8,arg9,arg10, \
3012 arg11) \
3013 do { \
3014 volatile OrigFn _orig = (orig); \
3015 volatile unsigned long _argvec[12]; \
3016 volatile unsigned long _res; \
3017 _argvec[0] = (unsigned long)_orig.nraddr; \
3018 _argvec[1] = (unsigned long)(arg1); \
3019 _argvec[2] = (unsigned long)(arg2); \
3020 _argvec[3] = (unsigned long)(arg3); \
3021 _argvec[4] = (unsigned long)(arg4); \
3022 _argvec[5] = (unsigned long)(arg5); \
3023 _argvec[6] = (unsigned long)(arg6); \
3024 _argvec[7] = (unsigned long)(arg7); \
3025 _argvec[8] = (unsigned long)(arg8); \
3026 _argvec[9] = (unsigned long)(arg9); \
3027 _argvec[10] = (unsigned long)(arg10); \
3028 _argvec[11] = (unsigned long)(arg11); \
3029 __asm__ volatile( \
3030 "ldr r0, [%1, #40] \n\t" \
3031 "ldr r1, [%1, #44] \n\t" \
3032 "push {r0, r1} \n\t" \
3033 "ldr r0, [%1, #20] \n\t" \
3034 "ldr r1, [%1, #24] \n\t" \
3035 "ldr r2, [%1, #28] \n\t" \
3036 "ldr r3, [%1, #32] \n\t" \
3037 "ldr r4, [%1, #36] \n\t" \
3038 "push {r0, r1, r2, r3, r4} \n\t" \
3039 "ldr r0, [%1, #4] \n\t" \
3040 "ldr r1, [%1, #8] \n\t" \
3041 "ldr r2, [%1, #12] \n\t" \
3042 "ldr r3, [%1, #16] \n\t" \
3043 "ldr r4, [%1] \n\t" /* target->r4 */ \
3044 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3045 "add sp, sp, #28 \n\t" \
3046 "mov %0, r0" \
3047 : /*out*/ "=r" (_res) \
3048 : /*in*/ "0" (&_argvec[0]) \
3049                      : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
3050 ); \
3051 lval = (__typeof__(lval)) _res; \
3052 } while (0)
3053
3054 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3055 arg6,arg7,arg8,arg9,arg10, \
3056 arg11,arg12) \
3057 do { \
3058 volatile OrigFn _orig = (orig); \
3059 volatile unsigned long _argvec[13]; \
3060 volatile unsigned long _res; \
3061 _argvec[0] = (unsigned long)_orig.nraddr; \
3062 _argvec[1] = (unsigned long)(arg1); \
3063 _argvec[2] = (unsigned long)(arg2); \
3064 _argvec[3] = (unsigned long)(arg3); \
3065 _argvec[4] = (unsigned long)(arg4); \
3066 _argvec[5] = (unsigned long)(arg5); \
3067 _argvec[6] = (unsigned long)(arg6); \
3068 _argvec[7] = (unsigned long)(arg7); \
3069 _argvec[8] = (unsigned long)(arg8); \
3070 _argvec[9] = (unsigned long)(arg9); \
3071 _argvec[10] = (unsigned long)(arg10); \
3072 _argvec[11] = (unsigned long)(arg11); \
3073 _argvec[12] = (unsigned long)(arg12); \
3074 __asm__ volatile( \
3075 "ldr r0, [%1, #40] \n\t" \
3076 "ldr r1, [%1, #44] \n\t" \
3077 "ldr r2, [%1, #48] \n\t" \
3078 "push {r0, r1, r2} \n\t" \
3079 "ldr r0, [%1, #20] \n\t" \
3080 "ldr r1, [%1, #24] \n\t" \
3081 "ldr r2, [%1, #28] \n\t" \
3082 "ldr r3, [%1, #32] \n\t" \
3083 "ldr r4, [%1, #36] \n\t" \
3084 "push {r0, r1, r2, r3, r4} \n\t" \
3085 "ldr r0, [%1, #4] \n\t" \
3086 "ldr r1, [%1, #8] \n\t" \
3087 "ldr r2, [%1, #12] \n\t" \
3088 "ldr r3, [%1, #16] \n\t" \
3089 "ldr r4, [%1] \n\t" /* target->r4 */ \
3090 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3091 "add sp, sp, #32 \n\t" \
3092 "mov %0, r0" \
3093 : /*out*/ "=r" (_res) \
3094 : /*in*/ "0" (&_argvec[0]) \
3095 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
3096 ); \
3097 lval = (__typeof__(lval)) _res; \
3098 } while (0)
3099
3100 #endif /* PLAT_arm_linux */
3101
3102 /* ------------------------- s390x-linux ------------------------- */
3103
3104 #if defined(PLAT_s390x_linux)
3105
3106 /* Same workaround as on amd64 (see above), but here we use r11 as the
3107    frame pointer and save the old r11 in r7.  r11 might be used for
3108    argvec, so we copy argvec into r1, since r1 is clobbered after the
3109    call anyway. */
3110 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
3111 # define __FRAME_POINTER \
3112 ,"d"(__builtin_dwarf_cfa())
3113 # define VALGRIND_CFI_PROLOGUE \
3114 ".cfi_remember_state\n\t" \
3115 "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
3116 "lgr 7,11\n\t" \
3117 "lgr 11,%2\n\t" \
3118 ".cfi_def_cfa r11, 0\n\t"
3119 # define VALGRIND_CFI_EPILOGUE \
3120 "lgr 11, 7\n\t" \
3121 ".cfi_restore_state\n\t"
3122 #else
3123 # define __FRAME_POINTER
3124 # define VALGRIND_CFI_PROLOGUE \
3125 "lgr 1,%1\n\t"
3126 # define VALGRIND_CFI_EPILOGUE
3127 #endif
3128
3129
3130
3131
3132 /* These regs are trashed by the hidden call. Note that we overwrite
3133 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
3134 function a proper return address. All others are ABI defined call
3135 clobbers. */
3136 #define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
3137 "f0","f1","f2","f3","f4","f5","f6","f7"
3138
3139
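/* Editor's note: the "aghi 15,-160" in each macro below reserves the
   160-byte register save area that the s390x ELF ABI requires a caller
   to provide below its stack pointer (r15).  Variants that also pass
   arguments on the stack reserve a little extra and copy those
   arguments to 160(15), 168(15), ... with "mvc" before the call. */
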
3140 #define CALL_FN_W_v(lval, orig) \
3141 do { \
3142 volatile OrigFn _orig = (orig); \
3143 volatile unsigned long _argvec[1]; \
3144 volatile unsigned long _res; \
3145 _argvec[0] = (unsigned long)_orig.nraddr; \
3146 __asm__ volatile( \
3147 VALGRIND_CFI_PROLOGUE \
3148 "aghi 15,-160\n\t" \
3149 "lg 1, 0(1)\n\t" /* target->r1 */ \
3150 VALGRIND_CALL_NOREDIR_R1 \
3151 "lgr %0, 2\n\t" \
3152 "aghi 15,160\n\t" \
3153 VALGRIND_CFI_EPILOGUE \
3154 : /*out*/ "=d" (_res) \
3155 : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
3156 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3157 ); \
3158 lval = (__typeof__(lval)) _res; \
3159 } while (0)
3160
3161 /* The call ABI passes the arguments in r2-r6 and on the stack */
3162 #define CALL_FN_W_W(lval, orig, arg1) \
3163 do { \
3164 volatile OrigFn _orig = (orig); \
3165 volatile unsigned long _argvec[2]; \
3166 volatile unsigned long _res; \
3167 _argvec[0] = (unsigned long)_orig.nraddr; \
3168 _argvec[1] = (unsigned long)arg1; \
3169 __asm__ volatile( \
3170 VALGRIND_CFI_PROLOGUE \
3171 "aghi 15,-160\n\t" \
3172 "lg 2, 8(1)\n\t" \
3173 "lg 1, 0(1)\n\t" \
3174 VALGRIND_CALL_NOREDIR_R1 \
3175 "lgr %0, 2\n\t" \
3176 "aghi 15,160\n\t" \
3177 VALGRIND_CFI_EPILOGUE \
3178 : /*out*/ "=d" (_res) \
3179 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3180 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3181 ); \
3182 lval = (__typeof__(lval)) _res; \
3183 } while (0)
3184
3185 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
3186 do { \
3187 volatile OrigFn _orig = (orig); \
3188 volatile unsigned long _argvec[3]; \
3189 volatile unsigned long _res; \
3190 _argvec[0] = (unsigned long)_orig.nraddr; \
3191 _argvec[1] = (unsigned long)arg1; \
3192 _argvec[2] = (unsigned long)arg2; \
3193 __asm__ volatile( \
3194 VALGRIND_CFI_PROLOGUE \
3195 "aghi 15,-160\n\t" \
3196 "lg 2, 8(1)\n\t" \
3197 "lg 3,16(1)\n\t" \
3198 "lg 1, 0(1)\n\t" \
3199 VALGRIND_CALL_NOREDIR_R1 \
3200 "lgr %0, 2\n\t" \
3201 "aghi 15,160\n\t" \
3202 VALGRIND_CFI_EPILOGUE \
3203 : /*out*/ "=d" (_res) \
3204 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3205 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3206 ); \
3207 lval = (__typeof__(lval)) _res; \
3208 } while (0)
3209
3210 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
3211 do { \
3212 volatile OrigFn _orig = (orig); \
3213 volatile unsigned long _argvec[4]; \
3214 volatile unsigned long _res; \
3215 _argvec[0] = (unsigned long)_orig.nraddr; \
3216 _argvec[1] = (unsigned long)arg1; \
3217 _argvec[2] = (unsigned long)arg2; \
3218 _argvec[3] = (unsigned long)arg3; \
3219 __asm__ volatile( \
3220 VALGRIND_CFI_PROLOGUE \
3221 "aghi 15,-160\n\t" \
3222 "lg 2, 8(1)\n\t" \
3223 "lg 3,16(1)\n\t" \
3224 "lg 4,24(1)\n\t" \
3225 "lg 1, 0(1)\n\t" \
3226 VALGRIND_CALL_NOREDIR_R1 \
3227 "lgr %0, 2\n\t" \
3228 "aghi 15,160\n\t" \
3229 VALGRIND_CFI_EPILOGUE \
3230 : /*out*/ "=d" (_res) \
3231 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3232 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3233 ); \
3234 lval = (__typeof__(lval)) _res; \
3235 } while (0)
3236
3237 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
3238 do { \
3239 volatile OrigFn _orig = (orig); \
3240 volatile unsigned long _argvec[5]; \
3241 volatile unsigned long _res; \
3242 _argvec[0] = (unsigned long)_orig.nraddr; \
3243 _argvec[1] = (unsigned long)arg1; \
3244 _argvec[2] = (unsigned long)arg2; \
3245 _argvec[3] = (unsigned long)arg3; \
3246 _argvec[4] = (unsigned long)arg4; \
3247 __asm__ volatile( \
3248 VALGRIND_CFI_PROLOGUE \
3249 "aghi 15,-160\n\t" \
3250 "lg 2, 8(1)\n\t" \
3251 "lg 3,16(1)\n\t" \
3252 "lg 4,24(1)\n\t" \
3253 "lg 5,32(1)\n\t" \
3254 "lg 1, 0(1)\n\t" \
3255 VALGRIND_CALL_NOREDIR_R1 \
3256 "lgr %0, 2\n\t" \
3257 "aghi 15,160\n\t" \
3258 VALGRIND_CFI_EPILOGUE \
3259 : /*out*/ "=d" (_res) \
3260 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3261 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3262 ); \
3263 lval = (__typeof__(lval)) _res; \
3264 } while (0)
3265
3266 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
3267 do { \
3268 volatile OrigFn _orig = (orig); \
3269 volatile unsigned long _argvec[6]; \
3270 volatile unsigned long _res; \
3271 _argvec[0] = (unsigned long)_orig.nraddr; \
3272 _argvec[1] = (unsigned long)arg1; \
3273 _argvec[2] = (unsigned long)arg2; \
3274 _argvec[3] = (unsigned long)arg3; \
3275 _argvec[4] = (unsigned long)arg4; \
3276 _argvec[5] = (unsigned long)arg5; \
3277 __asm__ volatile( \
3278 VALGRIND_CFI_PROLOGUE \
3279 "aghi 15,-160\n\t" \
3280 "lg 2, 8(1)\n\t" \
3281 "lg 3,16(1)\n\t" \
3282 "lg 4,24(1)\n\t" \
3283 "lg 5,32(1)\n\t" \
3284 "lg 6,40(1)\n\t" \
3285 "lg 1, 0(1)\n\t" \
3286 VALGRIND_CALL_NOREDIR_R1 \
3287 "lgr %0, 2\n\t" \
3288 "aghi 15,160\n\t" \
3289 VALGRIND_CFI_EPILOGUE \
3290 : /*out*/ "=d" (_res) \
3291 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3292 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3293 ); \
3294 lval = (__typeof__(lval)) _res; \
3295 } while (0)
3296
3297 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3298 arg6) \
3299 do { \
3300 volatile OrigFn _orig = (orig); \
3301 volatile unsigned long _argvec[7]; \
3302 volatile unsigned long _res; \
3303 _argvec[0] = (unsigned long)_orig.nraddr; \
3304 _argvec[1] = (unsigned long)arg1; \
3305 _argvec[2] = (unsigned long)arg2; \
3306 _argvec[3] = (unsigned long)arg3; \
3307 _argvec[4] = (unsigned long)arg4; \
3308 _argvec[5] = (unsigned long)arg5; \
3309 _argvec[6] = (unsigned long)arg6; \
3310 __asm__ volatile( \
3311 VALGRIND_CFI_PROLOGUE \
3312 "aghi 15,-168\n\t" \
3313 "lg 2, 8(1)\n\t" \
3314 "lg 3,16(1)\n\t" \
3315 "lg 4,24(1)\n\t" \
3316 "lg 5,32(1)\n\t" \
3317 "lg 6,40(1)\n\t" \
3318 "mvc 160(8,15), 48(1)\n\t" \
3319 "lg 1, 0(1)\n\t" \
3320 VALGRIND_CALL_NOREDIR_R1 \
3321 "lgr %0, 2\n\t" \
3322 "aghi 15,168\n\t" \
3323 VALGRIND_CFI_EPILOGUE \
3324 : /*out*/ "=d" (_res) \
3325 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3326 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3327 ); \
3328 lval = (__typeof__(lval)) _res; \
3329 } while (0)
3330
3331 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3332 arg6, arg7) \
3333 do { \
3334 volatile OrigFn _orig = (orig); \
3335 volatile unsigned long _argvec[8]; \
3336 volatile unsigned long _res; \
3337 _argvec[0] = (unsigned long)_orig.nraddr; \
3338 _argvec[1] = (unsigned long)arg1; \
3339 _argvec[2] = (unsigned long)arg2; \
3340 _argvec[3] = (unsigned long)arg3; \
3341 _argvec[4] = (unsigned long)arg4; \
3342 _argvec[5] = (unsigned long)arg5; \
3343 _argvec[6] = (unsigned long)arg6; \
3344 _argvec[7] = (unsigned long)arg7; \
3345 __asm__ volatile( \
3346 VALGRIND_CFI_PROLOGUE \
3347 "aghi 15,-176\n\t" \
3348 "lg 2, 8(1)\n\t" \
3349 "lg 3,16(1)\n\t" \
3350 "lg 4,24(1)\n\t" \
3351 "lg 5,32(1)\n\t" \
3352 "lg 6,40(1)\n\t" \
3353 "mvc 160(8,15), 48(1)\n\t" \
3354 "mvc 168(8,15), 56(1)\n\t" \
3355 "lg 1, 0(1)\n\t" \
3356 VALGRIND_CALL_NOREDIR_R1 \
3357 "lgr %0, 2\n\t" \
3358 "aghi 15,176\n\t" \
3359 VALGRIND_CFI_EPILOGUE \
3360 : /*out*/ "=d" (_res) \
3361 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3362 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3363 ); \
3364 lval = (__typeof__(lval)) _res; \
3365 } while (0)
3366
3367 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3368 arg6, arg7 ,arg8) \
3369 do { \
3370 volatile OrigFn _orig = (orig); \
3371 volatile unsigned long _argvec[9]; \
3372 volatile unsigned long _res; \
3373 _argvec[0] = (unsigned long)_orig.nraddr; \
3374 _argvec[1] = (unsigned long)arg1; \
3375 _argvec[2] = (unsigned long)arg2; \
3376 _argvec[3] = (unsigned long)arg3; \
3377 _argvec[4] = (unsigned long)arg4; \
3378 _argvec[5] = (unsigned long)arg5; \
3379 _argvec[6] = (unsigned long)arg6; \
3380 _argvec[7] = (unsigned long)arg7; \
3381 _argvec[8] = (unsigned long)arg8; \
3382 __asm__ volatile( \
3383 VALGRIND_CFI_PROLOGUE \
3384 "aghi 15,-184\n\t" \
3385 "lg 2, 8(1)\n\t" \
3386 "lg 3,16(1)\n\t" \
3387 "lg 4,24(1)\n\t" \
3388 "lg 5,32(1)\n\t" \
3389 "lg 6,40(1)\n\t" \
3390 "mvc 160(8,15), 48(1)\n\t" \
3391 "mvc 168(8,15), 56(1)\n\t" \
3392 "mvc 176(8,15), 64(1)\n\t" \
3393 "lg 1, 0(1)\n\t" \
3394 VALGRIND_CALL_NOREDIR_R1 \
3395 "lgr %0, 2\n\t" \
3396 "aghi 15,184\n\t" \
3397 VALGRIND_CFI_EPILOGUE \
3398 : /*out*/ "=d" (_res) \
3399 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3400 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3401 ); \
3402 lval = (__typeof__(lval)) _res; \
3403 } while (0)
3404
3405 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3406 arg6, arg7 ,arg8, arg9) \
3407 do { \
3408 volatile OrigFn _orig = (orig); \
3409 volatile unsigned long _argvec[10]; \
3410 volatile unsigned long _res; \
3411 _argvec[0] = (unsigned long)_orig.nraddr; \
3412 _argvec[1] = (unsigned long)arg1; \
3413 _argvec[2] = (unsigned long)arg2; \
3414 _argvec[3] = (unsigned long)arg3; \
3415 _argvec[4] = (unsigned long)arg4; \
3416 _argvec[5] = (unsigned long)arg5; \
3417 _argvec[6] = (unsigned long)arg6; \
3418 _argvec[7] = (unsigned long)arg7; \
3419 _argvec[8] = (unsigned long)arg8; \
3420 _argvec[9] = (unsigned long)arg9; \
3421 __asm__ volatile( \
3422 VALGRIND_CFI_PROLOGUE \
3423 "aghi 15,-192\n\t" \
3424 "lg 2, 8(1)\n\t" \
3425 "lg 3,16(1)\n\t" \
3426 "lg 4,24(1)\n\t" \
3427 "lg 5,32(1)\n\t" \
3428 "lg 6,40(1)\n\t" \
3429 "mvc 160(8,15), 48(1)\n\t" \
3430 "mvc 168(8,15), 56(1)\n\t" \
3431 "mvc 176(8,15), 64(1)\n\t" \
3432 "mvc 184(8,15), 72(1)\n\t" \
3433 "lg 1, 0(1)\n\t" \
3434 VALGRIND_CALL_NOREDIR_R1 \
3435 "lgr %0, 2\n\t" \
3436 "aghi 15,192\n\t" \
3437 VALGRIND_CFI_EPILOGUE \
3438 : /*out*/ "=d" (_res) \
3439 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3440 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3441 ); \
3442 lval = (__typeof__(lval)) _res; \
3443 } while (0)
3444
3445 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3446 arg6, arg7 ,arg8, arg9, arg10) \
3447 do { \
3448 volatile OrigFn _orig = (orig); \
3449 volatile unsigned long _argvec[11]; \
3450 volatile unsigned long _res; \
3451 _argvec[0] = (unsigned long)_orig.nraddr; \
3452 _argvec[1] = (unsigned long)arg1; \
3453 _argvec[2] = (unsigned long)arg2; \
3454 _argvec[3] = (unsigned long)arg3; \
3455 _argvec[4] = (unsigned long)arg4; \
3456 _argvec[5] = (unsigned long)arg5; \
3457 _argvec[6] = (unsigned long)arg6; \
3458 _argvec[7] = (unsigned long)arg7; \
3459 _argvec[8] = (unsigned long)arg8; \
3460 _argvec[9] = (unsigned long)arg9; \
3461 _argvec[10] = (unsigned long)arg10; \
3462 __asm__ volatile( \
3463 VALGRIND_CFI_PROLOGUE \
3464 "aghi 15,-200\n\t" \
3465 "lg 2, 8(1)\n\t" \
3466 "lg 3,16(1)\n\t" \
3467 "lg 4,24(1)\n\t" \
3468 "lg 5,32(1)\n\t" \
3469 "lg 6,40(1)\n\t" \
3470 "mvc 160(8,15), 48(1)\n\t" \
3471 "mvc 168(8,15), 56(1)\n\t" \
3472 "mvc 176(8,15), 64(1)\n\t" \
3473 "mvc 184(8,15), 72(1)\n\t" \
3474 "mvc 192(8,15), 80(1)\n\t" \
3475 "lg 1, 0(1)\n\t" \
3476 VALGRIND_CALL_NOREDIR_R1 \
3477 "lgr %0, 2\n\t" \
3478 "aghi 15,200\n\t" \
3479 VALGRIND_CFI_EPILOGUE \
3480 : /*out*/ "=d" (_res) \
3481 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3482 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3483 ); \
3484 lval = (__typeof__(lval)) _res; \
3485 } while (0)
3486
3487 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3488 arg6, arg7 ,arg8, arg9, arg10, arg11) \
3489 do { \
3490 volatile OrigFn _orig = (orig); \
3491 volatile unsigned long _argvec[12]; \
3492 volatile unsigned long _res; \
3493 _argvec[0] = (unsigned long)_orig.nraddr; \
3494 _argvec[1] = (unsigned long)arg1; \
3495 _argvec[2] = (unsigned long)arg2; \
3496 _argvec[3] = (unsigned long)arg3; \
3497 _argvec[4] = (unsigned long)arg4; \
3498 _argvec[5] = (unsigned long)arg5; \
3499 _argvec[6] = (unsigned long)arg6; \
3500 _argvec[7] = (unsigned long)arg7; \
3501 _argvec[8] = (unsigned long)arg8; \
3502 _argvec[9] = (unsigned long)arg9; \
3503 _argvec[10] = (unsigned long)arg10; \
3504 _argvec[11] = (unsigned long)arg11; \
3505 __asm__ volatile( \
3506 VALGRIND_CFI_PROLOGUE \
3507 "aghi 15,-208\n\t" \
3508 "lg 2, 8(1)\n\t" \
3509 "lg 3,16(1)\n\t" \
3510 "lg 4,24(1)\n\t" \
3511 "lg 5,32(1)\n\t" \
3512 "lg 6,40(1)\n\t" \
3513 "mvc 160(8,15), 48(1)\n\t" \
3514 "mvc 168(8,15), 56(1)\n\t" \
3515 "mvc 176(8,15), 64(1)\n\t" \
3516 "mvc 184(8,15), 72(1)\n\t" \
3517 "mvc 192(8,15), 80(1)\n\t" \
3518 "mvc 200(8,15), 88(1)\n\t" \
3519 "lg 1, 0(1)\n\t" \
3520 VALGRIND_CALL_NOREDIR_R1 \
3521 "lgr %0, 2\n\t" \
3522 "aghi 15,208\n\t" \
3523 VALGRIND_CFI_EPILOGUE \
3524 : /*out*/ "=d" (_res) \
3525 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3526 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3527 ); \
3528 lval = (__typeof__(lval)) _res; \
3529 } while (0)
3530
3531 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3532 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
3533 do { \
3534 volatile OrigFn _orig = (orig); \
3535 volatile unsigned long _argvec[13]; \
3536 volatile unsigned long _res; \
3537 _argvec[0] = (unsigned long)_orig.nraddr; \
3538 _argvec[1] = (unsigned long)arg1; \
3539 _argvec[2] = (unsigned long)arg2; \
3540 _argvec[3] = (unsigned long)arg3; \
3541 _argvec[4] = (unsigned long)arg4; \
3542 _argvec[5] = (unsigned long)arg5; \
3543 _argvec[6] = (unsigned long)arg6; \
3544 _argvec[7] = (unsigned long)arg7; \
3545 _argvec[8] = (unsigned long)arg8; \
3546 _argvec[9] = (unsigned long)arg9; \
3547 _argvec[10] = (unsigned long)arg10; \
3548 _argvec[11] = (unsigned long)arg11; \
3549 _argvec[12] = (unsigned long)arg12; \
3550 __asm__ volatile( \
3551 VALGRIND_CFI_PROLOGUE \
3552 "aghi 15,-216\n\t" \
3553 "lg 2, 8(1)\n\t" \
3554 "lg 3,16(1)\n\t" \
3555 "lg 4,24(1)\n\t" \
3556 "lg 5,32(1)\n\t" \
3557 "lg 6,40(1)\n\t" \
3558 "mvc 160(8,15), 48(1)\n\t" \
3559 "mvc 168(8,15), 56(1)\n\t" \
3560 "mvc 176(8,15), 64(1)\n\t" \
3561 "mvc 184(8,15), 72(1)\n\t" \
3562 "mvc 192(8,15), 80(1)\n\t" \
3563 "mvc 200(8,15), 88(1)\n\t" \
3564 "mvc 208(8,15), 96(1)\n\t" \
3565 "lg 1, 0(1)\n\t" \
3566 VALGRIND_CALL_NOREDIR_R1 \
3567 "lgr %0, 2\n\t" \
3568 "aghi 15,216\n\t" \
3569 VALGRIND_CFI_EPILOGUE \
3570 : /*out*/ "=d" (_res) \
3571 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3572 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3573 ); \
3574 lval = (__typeof__(lval)) _res; \
3575 } while (0)
3576
3577
3578 #endif /* PLAT_s390x_linux */
3579
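/* Illustrative sketch -- not part of the original header.  The
   CALL_FN_W_* families defined per platform above are normally used
   from function wrappers built with the I_WRAP_SONAME_FNNAME_* macros
   defined earlier in this file.  The soname "libfoo.so.1" (Z-encoded
   as libfooZdsoZd1) and the wrapped function foo() are hypothetical.
   Example only; never compiled. */
#if 0
#include <stdio.h>
#include "valgrind.h"

/* Wrap `int foo(int)' exported by an object with soname libfoo.so.1. */
int I_WRAP_SONAME_FNNAME_ZU(libfooZdsoZd1, foo)(int x)
{
   int    result;
   OrigFn fn;
   VALGRIND_GET_ORIG_FN(fn);            /* look up the real foo()     */
   printf("foo wrapper: calling with %d\n", x);
   CALL_FN_W_W(result, fn, x);          /* one word arg, word result  */
   printf("foo wrapper: got %d\n", result);
   return result;
}
#endif
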
3580
3581 /* ------------------------------------------------------------------ */
3582 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
3583 /* */
3584 /* ------------------------------------------------------------------ */
3585
3586 /* Some request codes. There are many more of these, but most are not
3587 exposed to end-user view. These are the public ones, all of the
3588 form 0x1000 + small_number.
3589
3590 Core ones are in the range 0x00000000--0x0000ffff. The non-public
3591 ones start at 0x2000.
3592 */
3593
3594 /* These macros are used by tools -- they must be public, but don't
3595 embed them into other programs. */
3596 #define VG_USERREQ_TOOL_BASE(a,b) \
3597 ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
3598 #define VG_IS_TOOL_USERREQ(a, b, v) \
3599 (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
3600
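/* Illustrative sketch -- not part of the original header.  A tool's
   public header derives its own request codes from VG_USERREQ_TOOL_BASE
   using the tool's two-character code, and a dispatcher recognises them
   with VG_IS_TOOL_USERREQ.  The tool code 'X','T' and the request names
   below are hypothetical.  Example only; never compiled. */
#if 0
typedef
   enum {
      VG_USERREQ__XT_DO_SOMETHING = VG_USERREQ_TOOL_BASE('X','T'),
      VG_USERREQ__XT_DO_SOMETHING_ELSE   /* later codes follow on */
   } XT_ClientRequest;

/* Inside the tool:  if (VG_IS_TOOL_USERREQ('X','T', arg[0])) ...  */
#endif
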
3601 /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
3602 This enum comprises an ABI exported by Valgrind to programs
3603 which use client requests. DO NOT CHANGE THE ORDER OF THESE
3604 ENTRIES, NOR DELETE ANY -- add new ones at the end. */
3605 typedef
3606 enum { VG_USERREQ__RUNNING_ON_VALGRIND = 0x1001,
3607 VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,
3608
3609 /* These allow any function to be called from the simulated
3610 CPU but run on the real CPU. Nb: the first arg passed to
3611 the function is always the ThreadId of the running
3612 thread! So CLIENT_CALL0 actually requires a 1 arg
3613 function, etc. */
3614 VG_USERREQ__CLIENT_CALL0 = 0x1101,
3615 VG_USERREQ__CLIENT_CALL1 = 0x1102,
3616 VG_USERREQ__CLIENT_CALL2 = 0x1103,
3617 VG_USERREQ__CLIENT_CALL3 = 0x1104,
3618
3619 /* Can be useful in regression testing suites -- eg. can
3620 send Valgrind's output to /dev/null and still count
3621 errors. */
3622 VG_USERREQ__COUNT_ERRORS = 0x1201,
3623
3624 /* Allows a string (a gdb monitor command) to be passed to the tool.
3625 Used for interaction with vgdb/gdb. */
3626 VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,
3627
3628 /* These are useful and can be interpreted by any tool that
3629 tracks malloc() et al, by using vg_replace_malloc.c. */
3630 VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
3631 VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
3632 VG_USERREQ__FREELIKE_BLOCK = 0x1302,
3633 /* Memory pool support. */
3634 VG_USERREQ__CREATE_MEMPOOL = 0x1303,
3635 VG_USERREQ__DESTROY_MEMPOOL = 0x1304,
3636 VG_USERREQ__MEMPOOL_ALLOC = 0x1305,
3637 VG_USERREQ__MEMPOOL_FREE = 0x1306,
3638 VG_USERREQ__MEMPOOL_TRIM = 0x1307,
3639 VG_USERREQ__MOVE_MEMPOOL = 0x1308,
3640 VG_USERREQ__MEMPOOL_CHANGE = 0x1309,
3641 VG_USERREQ__MEMPOOL_EXISTS = 0x130a,
3642
3643 /* Allow printfs to valgrind log. */
3644 /* The first two pass the va_list argument by value, which
3645 assumes it is the same size as or smaller than a UWord,
3646 which generally isn't the case. Hence they are deprecated.
3647 The second two pass the vargs by reference and so are
3648 immune to this problem. */
3649 /* both :: char* fmt, va_list vargs (DEPRECATED) */
3650 VG_USERREQ__PRINTF = 0x1401,
3651 VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
3652 /* both :: char* fmt, va_list* vargs */
3653 VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
3654 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,
3655
3656 /* Stack support. */
3657 VG_USERREQ__STACK_REGISTER = 0x1501,
3658 VG_USERREQ__STACK_DEREGISTER = 0x1502,
3659 VG_USERREQ__STACK_CHANGE = 0x1503,
3660
3661 /* Wine support */
3662 VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,
3663
3664 /* Querying of debug info. */
3665 VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,
3666
3667 /* Disable/enable error reporting level. Takes a single
3668 Word arg which is the delta to this thread's error
3669 disablement indicator. Hence 1 disables or further
3670 disables errors, and -1 moves back towards enablement.
3671 Other values are not allowed. */
3672 VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801
3673 } Vg_ClientRequest;
3674
3675 #if !defined(__GNUC__)
3676 # define __extension__ /* */
3677 #endif
3678
3679
3680 /* Returns the number of Valgrinds this code is running under. That
3681 is, 0 if running natively, 1 if running under Valgrind, 2 if
3682 running under Valgrind which is running under another Valgrind,
3683 etc. */
3684 #define RUNNING_ON_VALGRIND \
3685 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */, \
3686 VG_USERREQ__RUNNING_ON_VALGRIND, \
3687 0, 0, 0, 0, 0)
3688
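/* Illustrative sketch -- not part of the original header.  Client code
   typically uses RUNNING_ON_VALGRIND to switch off timing-sensitive or
   self-checking behaviour when virtualised.  The function name is
   hypothetical.  Example only; never compiled. */
#if 0
#include <stdio.h>
#include "valgrind.h"

static void report_environment(void)
{
   unsigned depth = RUNNING_ON_VALGRIND;
   if (depth > 0)
      printf("running under Valgrind, nesting depth %u\n", depth);
   else
      printf("running natively\n");
}
#endif
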
3689
3690 /* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
3691 _qzz_len - 1]. Useful if you are debugging a JITter or some such,
3692 since it provides a way to make sure valgrind will retranslate the
3693 invalidated area. Returns no value. */
3694 #define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
3695 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
3696 _qzz_addr, _qzz_len, 0, 0, 0)
3697
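/* Illustrative sketch -- not part of the original header.  A JIT that
   rewrites code it has already executed should invalidate Valgrind's
   cached translations of that range.  emit_new_code() and the code
   buffer are hypothetical.  Example only; never compiled. */
#if 0
#include <stddef.h>
#include "valgrind.h"

extern void emit_new_code(unsigned char* buf, size_t len);

static void repatch(unsigned char* code_buf, size_t code_len)
{
   emit_new_code(code_buf, code_len);   /* overwrite old machine code */
   VALGRIND_DISCARD_TRANSLATIONS(code_buf, code_len);
}
#endif
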
3698
3699 /* These requests are for getting Valgrind itself to print something.
3700 Possibly with a backtrace. This is a really ugly hack. The return value
3701 is the number of characters printed, excluding the "**<pid>** " part at the
3702 start and the backtrace (if present). */
3703
3704 #if defined(__GNUC__) || defined(__INTEL_COMPILER)
3705 /* Modern GCC will optimize the static routine out if unused,
3706 and the unused attribute will suppress warnings about it. */
3707 static int VALGRIND_PRINTF(const char *format, ...)
3708 __attribute__((format(__printf__, 1, 2), __unused__));
3709 #endif
3710 static int
3711 #if defined(_MSC_VER)
3712 __inline
3713 #endif
3714 VALGRIND_PRINTF(const char *format, ...)
3715 {
3716 #if defined(NVALGRIND)
3717 return 0;
3718 #else /* NVALGRIND */
3719 #if defined(_MSC_VER)
3720 uintptr_t _qzz_res;
3721 #else
3722 unsigned long _qzz_res;
3723 #endif
3724 va_list vargs;
3725 va_start(vargs, format);
3726 #if defined(_MSC_VER)
3727 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
3728 VG_USERREQ__PRINTF_VALIST_BY_REF,
3729 (uintptr_t)format,
3730 (uintptr_t)&vargs,
3731 0, 0, 0);
3732 #else
3733 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
3734 VG_USERREQ__PRINTF_VALIST_BY_REF,
3735 (unsigned long)format,
3736 (unsigned long)&vargs,
3737 0, 0, 0);
3738 #endif
3739 va_end(vargs);
3740 return (int)_qzz_res;
3741 #endif /* NVALGRIND */
3742 }
3743
3744 #if defined(__GNUC__) || defined(__INTEL_COMPILER)
3745 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
3746 __attribute__((format(__printf__, 1, 2), __unused__));
3747 #endif
3748 static int
3749 #if defined(_MSC_VER)
3750 __inline
3751 #endif
3752 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
3753 {
3754 #if defined(NVALGRIND)
3755 return 0;
3756 #else /* NVALGRIND */
3757 #if defined(_MSC_VER)
3758 uintptr_t _qzz_res;
3759 #else
3760 unsigned long _qzz_res;
3761 #endif
3762 va_list vargs;
3763 va_start(vargs, format);
3764 #if defined(_MSC_VER)
3765 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
3766 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
3767 (uintptr_t)format,
3768 (uintptr_t)&vargs,
3769 0, 0, 0);
3770 #else
3771 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
3772 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
3773 (unsigned long)format,
3774 (unsigned long)&vargs,
3775 0, 0, 0);
3776 #endif
3777 va_end(vargs);
3778 return (int)_qzz_res;
3779 #endif /* NVALGRIND */
3780 }
3781
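/* Illustrative sketch -- not part of the original header.  The printf
   requests above write into Valgrind's own log, which is useful for
   tagging client-side events in tool output.  The phase/object counts
   are hypothetical.  Example only; never compiled. */
#if 0
#include "valgrind.h"

static void log_phase(int phase, unsigned live_objects)
{
   VALGRIND_PRINTF("entering phase %d with %u live objects\n",
                   phase, live_objects);
   if (live_objects == 0)
      VALGRIND_PRINTF_BACKTRACE("phase %d started with an empty heap\n",
                                phase);
}
#endif
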
3782
3783 /* These requests allow control to move from the simulated CPU to the
3784 real CPU, calling an arbitrary function.
3785
3786 Note that the current ThreadId is inserted as the first argument.
3787 So this call:
3788
3789 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
3790
3791 requires f to have this signature:
3792
3793 Word f(Word tid, Word arg1, Word arg2)
3794
3795 where "Word" is a word-sized type.
3796
3797 Note that these client requests are not entirely reliable. For example,
3798 if you call a function with them that subsequently calls printf(),
3799 there's a high chance Valgrind will crash. Generally, your prospects of
3800 these working are made higher if the called function does not refer to
3801 any global variables, and does not refer to any libc or other functions
3802 (printf et al). Any kind of entanglement with libc or dynamic linking is
3803 likely to have a bad outcome, for tricky reasons which we've grappled
3804 with a lot in the past.
3805 */
3806 #define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
3807 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
3808 VG_USERREQ__CLIENT_CALL0, \
3809 _qyy_fn, \
3810 0, 0, 0, 0)
3811
3812 #define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
3813 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
3814 VG_USERREQ__CLIENT_CALL1, \
3815 _qyy_fn, \
3816 _qyy_arg1, 0, 0, 0)
3817
3818 #define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
3819 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
3820 VG_USERREQ__CLIENT_CALL2, \
3821 _qyy_fn, \
3822 _qyy_arg1, _qyy_arg2, 0, 0)
3823
3824 #define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
3825 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
3826 VG_USERREQ__CLIENT_CALL3, \
3827 _qyy_fn, \
3828 _qyy_arg1, _qyy_arg2, \
3829 _qyy_arg3, 0)
3830
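/* Illustrative sketch -- not part of the original header.  Note the
   extra leading ThreadId argument; a word-sized type is spelt here as
   unsigned long.  add_on_real_cpu() is hypothetical and deliberately
   avoids libc, as advised above.  Example only; never compiled. */
#if 0
#include "valgrind.h"

static unsigned long add_on_real_cpu(unsigned long tid,
                                     unsigned long a, unsigned long b)
{
   (void)tid;   /* first argument is always the running ThreadId */
   return a + b;
}

static unsigned long forty_two(void)
{
   return (unsigned long)VALGRIND_NON_SIMD_CALL2(add_on_real_cpu, 7, 35);
}
#endif
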
3831
3832 /* Counts the number of errors that have been recorded by a tool. Nb:
3833 the tool must record the errors with VG_(maybe_record_error)() or
3834 VG_(unique_error)() for them to be counted. */
3835 #define VALGRIND_COUNT_ERRORS \
3836 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \
3837 0 /* default return */, \
3838 VG_USERREQ__COUNT_ERRORS, \
3839 0, 0, 0, 0, 0)
3840
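/* Illustrative sketch -- not part of the original header.  A regression
   harness can fail a test if the tool recorded new errors, even when
   Valgrind's log is discarded.  run_test_case() is hypothetical.
   Example only; never compiled. */
#if 0
#include "valgrind.h"

extern void run_test_case(void);

static int test_passed(void)
{
   unsigned before = VALGRIND_COUNT_ERRORS;
   run_test_case();
   return VALGRIND_COUNT_ERRORS == before;
}
#endif
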
3841 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
3842 when heap blocks are allocated in order to give accurate results. This
3843 happens automatically for the standard allocator functions such as
3844 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
3845 delete[], etc.
3846
3847 But if your program uses a custom allocator, this doesn't automatically
3848 happen, and Valgrind will not do as well. For example, if you allocate
3849 superblocks with mmap() and then allocate chunks of the superblocks, all
3850 Valgrind's observations will be at the mmap() level and it won't know that
3851 the chunks should be considered separate entities. In Memcheck's case,
3852 that means you probably won't get heap block overrun detection (because
3853 there won't be redzones marked as unaddressable) and you definitely won't
3854 get any leak detection.
3855
3856 The following client requests allow a custom allocator to be annotated so
3857 that it can be handled accurately by Valgrind.
3858
3859 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
3860 by a malloc()-like function. For Memcheck (an illustrative case), this
3861 does two things:
3862
3863 - It records that the block has been allocated. This means any addresses
3864 within the block mentioned in error messages will be
3865 identified as belonging to the block. It also means that if the block
3866 isn't freed it will be detected by the leak checker.
3867
3868 - It marks the block as being addressable and undefined (if 'is_zeroed' is
3869 not set), or addressable and defined (if 'is_zeroed' is set). This
3870 controls how accesses to the block by the program are handled.
3871
3872 'addr' is the start of the usable block (ie. after any
3873 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
3874 can apply redzones -- these are blocks of padding at the start and end of
3875 each block. Adding redzones is recommended as it makes it much more likely
3876 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
3877 zeroed (or filled with another predictable value), as is the case for
3878 calloc().
3879
3880 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
3881 heap block -- that will be used by the client program -- is allocated.
3882 It's best to put it at the outermost level of the allocator if possible;
3883 for example, if you have a function my_alloc() which calls
3884 internal_alloc(), and the client request is put inside internal_alloc(),
3885 stack traces relating to the heap block will contain entries for both
3886 my_alloc() and internal_alloc(), which is probably not what you want.
3887
3888 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
3889 custom blocks from within a heap block, B, that has been allocated with
3890 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
3891 -- the custom blocks will take precedence.
3892
3893 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
3894 Memcheck, it does two things:
3895
3896 - It records that the block has been deallocated. This assumes that the
3897 block was annotated as having been allocated via
3898 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
3899
3900 - It marks the block as being unaddressable.
3901
3902 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
3903 heap block is deallocated.
3904
3905 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
3906 Memcheck, it does four things:
3907
3908 - It records that the size of a block has been changed. This assumes that
3909 the block was annotated as having been allocated via
3910 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
3911
3912 - If the block shrank, it marks the freed memory as being unaddressable.
3913
3914 - If the block grew, it marks the new area as undefined and defines a red
3915 zone past the end of the new block.
3916
3917 - The V-bits of the overlap between the old and the new block are preserved.
3918
3919 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
3920 and before deallocation of the old block.
3921
3922 In many cases, these three client requests will not be enough to get your
3923 allocator working well with Memcheck. More specifically, if your allocator
3924 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
3925 will be necessary to mark the memory as addressable just before the zeroing
3926 occurs, otherwise you'll get a lot of invalid write errors. For example,
3927 you'll need to do this if your allocator recycles freed blocks, but it
3928 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
3929 Alternatively, if your allocator reuses freed blocks for allocator-internal
3930 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
3931
3932 Really, what's happening is a blurring of the lines between the client
3933 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
3934 memory should be considered unaddressable to the client program, but the
3935 allocator knows more than the rest of the client program and so may be able
3936 to safely access it. Extra client requests are necessary for Valgrind to
3937 understand the distinction between the allocator and the rest of the
3938 program.
3939
3940 Ignored if addr == 0.
3941 */
3942 #define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \
3943 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
3944 addr, sizeB, rzB, is_zeroed, 0)
3945
3946 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
3947 Ignored if addr == 0.
3948 */
3949 #define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
3950 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
3951 addr, oldSizeB, newSizeB, rzB, 0)
3952
3953 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
3954 Ignored if addr == 0.
3955 */
3956 #define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
3957 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK, \
3958 addr, rzB, 0, 0, 0)
3959
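/* Illustrative sketch -- not part of the original header.  A minimal
   custom allocator annotated with the requests above, assuming chunks
   are carved out of an mmap()ed superblock.  carve_from_superblock(),
   recycle_chunk() and RED_ZONE are hypothetical.  Example only; never
   compiled. */
#if 0
#include <stddef.h>
#include "valgrind.h"

#define RED_ZONE 16   /* bytes of padding before and after each chunk */

extern unsigned char* carve_from_superblock(size_t n);
extern void           recycle_chunk(void* p);

static void* my_alloc(size_t n)
{
   unsigned char* raw    = carve_from_superblock(n + 2*RED_ZONE);
   unsigned char* usable = raw + RED_ZONE;
   VALGRIND_MALLOCLIKE_BLOCK(usable, n, RED_ZONE, /*is_zeroed*/0);
   return usable;
}

static void my_free(void* p)
{
   VALGRIND_FREELIKE_BLOCK(p, RED_ZONE);
   recycle_chunk(p);
}
#endif
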
3960 /* Create a memory pool. */
3961 #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
3962 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
3963 pool, rzB, is_zeroed, 0, 0)
3964
3965 /* Destroy a memory pool. */
3966 #define VALGRIND_DESTROY_MEMPOOL(pool) \
3967 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL, \
3968 pool, 0, 0, 0, 0)
3969
3970 /* Associate a piece of memory with a memory pool. */
3971 #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
3972 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC, \
3973 pool, addr, size, 0, 0)
3974
3975 /* Disassociate a piece of memory from a memory pool. */
3976 #define VALGRIND_MEMPOOL_FREE(pool, addr) \
3977 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE, \
3978 pool, addr, 0, 0, 0)
3979
3980 /* Disassociate any pieces outside a particular range. */
3981 #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
3982 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM, \
3983 pool, addr, size, 0, 0)
3984
3985 /* Move a memory pool: the pool anchored at poolA is now anchored at poolB. */
3986 #define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \
3987 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL, \
3988 poolA, poolB, 0, 0, 0)
3989
3990 /* Resize and/or move a piece associated with a memory pool. */
3991 #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
3992 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE, \
3993 pool, addrA, addrB, size, 0)
3994
3995 /* Return 1 if a mempool exists, else 0. */
3996 #define VALGRIND_MEMPOOL_EXISTS(pool) \
3997 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
3998 VG_USERREQ__MEMPOOL_EXISTS, \
3999 pool, 0, 0, 0, 0)
4000
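/* Illustrative sketch -- not part of the original header.  Typical pool
   annotations: the pool handle is simply the address of the pool's
   control structure.  my_pool_t and bump_allocate() are hypothetical.
   Example only; never compiled. */
#if 0
#include <stddef.h>
#include "valgrind.h"

typedef struct my_pool my_pool_t;
extern void* bump_allocate(my_pool_t* pool, size_t n);

static void pool_init(my_pool_t* pool)
{
   VALGRIND_CREATE_MEMPOOL(pool, /*rzB*/0, /*is_zeroed*/0);
}

static void* pool_alloc(my_pool_t* pool, size_t n)
{
   void* p = bump_allocate(pool, n);
   VALGRIND_MEMPOOL_ALLOC(pool, p, n);
   return p;
}

static void pool_free(my_pool_t* pool, void* p)
{
   VALGRIND_MEMPOOL_FREE(pool, p);
   /* ... actually recycle p within the pool ... */
}

static void pool_destroy(my_pool_t* pool)
{
   VALGRIND_DESTROY_MEMPOOL(pool);
}
#endif
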
4001 /* Mark a piece of memory as being a stack. Returns a stack id. */
4002 #define VALGRIND_STACK_REGISTER(start, end) \
4003 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
4004 VG_USERREQ__STACK_REGISTER, \
4005 start, end, 0, 0, 0)
4006
4007 /* Unmark the piece of memory associated with a stack id as being a
4008 stack. */
4009 #define VALGRIND_STACK_DEREGISTER(id) \
4010 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
4011 id, 0, 0, 0, 0)
4012
4013 /* Change the start and end address of the stack id. */
4014 #define VALGRIND_STACK_CHANGE(id, start, end) \
4015 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE, \
4016 id, start, end, 0, 0)
4017
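/* Illustrative sketch -- not part of the original header.  Code that
   switches to its own stacks (coroutines, fibers) should register them
   so Valgrind can distinguish a deliberate stack switch from a wild
   move of the stack pointer.  The coro_t type is hypothetical.
   Example only; never compiled. */
#if 0
#include <stdlib.h>
#include "valgrind.h"

typedef struct {
   void*    stack_lo;
   void*    stack_hi;
   unsigned vg_stack_id;
} coro_t;

static void coro_stack_create(coro_t* c, size_t stack_size)
{
   c->stack_lo    = malloc(stack_size);
   c->stack_hi    = (char*)c->stack_lo + stack_size;
   c->vg_stack_id = VALGRIND_STACK_REGISTER(c->stack_lo, c->stack_hi);
}

static void coro_stack_destroy(coro_t* c)
{
   VALGRIND_STACK_DEREGISTER(c->vg_stack_id);
   free(c->stack_lo);
}
#endif
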
4018 /* Load PDB debug info for Wine PE image_map. */
4019 #define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \
4020 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
4021 fd, ptr, total_size, delta, 0)
4022
4023 /* Map a code address to a source file name and line number. buf64
4024 must point to a 64-byte buffer in the caller's address space. The
4025 result will be dumped in there and is guaranteed to be zero
4026 terminated. If no info is found, the first byte is set to zero. */
4027 #define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \
4028 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
4029 VG_USERREQ__MAP_IP_TO_SRCLOC, \
4030 addr, buf64, 0, 0, 0)
4031
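/* Illustrative sketch -- not part of the original header.  Resolving a
   code address to "file:line" text from inside the client, e.g. in a
   custom assertion handler.  The buffer must be 64 bytes, as noted
   above.  Example only; never compiled. */
#if 0
#include "valgrind.h"

static void describe_ip(void* ip)
{
   char buf64[64];
   buf64[0] = 0;
   VALGRIND_MAP_IP_TO_SRCLOC(ip, buf64);
   if (buf64[0] != 0)
      VALGRIND_PRINTF("at %p: %s\n", ip, buf64);
   else
      VALGRIND_PRINTF("at %p: no source info\n", ip);
}
#endif
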
4032 /* Disable error reporting for this thread. Behaves in a stack-like
4033 way, so you can safely call this multiple times provided that
4034 VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
4035 to re-enable reporting. The first call of this macro disables
4036 reporting. Subsequent calls have no effect except to increase the
4037 number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
4038 reporting. Child threads do not inherit this setting from their
4039 parents -- they are always created with reporting enabled. */
4040 #define VALGRIND_DISABLE_ERROR_REPORTING \
4041 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
4042 1, 0, 0, 0, 0)
4043
4044 /* Re-enable error reporting, as per comments on
4045 VALGRIND_DISABLE_ERROR_REPORTING. */
4046 #define VALGRIND_ENABLE_ERROR_REPORTING \
4047 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
4048 -1, 0, 0, 0, 0)
4049
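/* Illustrative sketch -- not part of the original header.  Suppressing
   reports around code that is known to trip the tool, e.g. a vectorised
   routine that deliberately over-reads a buffer.  The called function
   is hypothetical.  Example only; never compiled. */
#if 0
#include "valgrind.h"

extern void fast_strlen_that_overreads(void);

static void call_known_noisy_code(void)
{
   VALGRIND_DISABLE_ERROR_REPORTING;
   fast_strlen_that_overreads();
   VALGRIND_ENABLE_ERROR_REPORTING;
}
#endif
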
4050 #undef PLAT_x86_darwin
4051 #undef PLAT_amd64_darwin
4052 #undef PLAT_x86_win32
4053 #undef PLAT_x86_linux
4054 #undef PLAT_amd64_linux
4055 #undef PLAT_ppc32_linux
4056 #undef PLAT_ppc64_linux
4057 #undef PLAT_arm_linux
4058 #undef PLAT_s390x_linux
4059
4060 #endif /* __VALGRIND_H */