
/**
 * Tencent is pleased to support the open source community by making MSEC available.
 *
 * Copyright (C) 2016 THL A29 Limited, a Tencent company. All rights reserved.
 *
 * Licensed under the GNU General Public License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License. You may
 * obtain a copy of the License at
 *
 *     https://opensource.org/licenses/GPL-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the
 * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */


/* -*- c -*-
   ----------------------------------------------------------------

   Notice that the following BSD-style license applies to this one
   file (valgrind.h) only.  The rest of Valgrind is licensed under the
   terms of the GNU General Public License, version 2, unless
   otherwise indicated.  See the COPYING file in the source
   distribution for details.

   ----------------------------------------------------------------

   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2000-2013 Julian Seward.  All rights reserved.

   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions
   are met:

   1. Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.

   2. The origin of this software must not be misrepresented; you must
      not claim that you wrote the original software.  If you use this
      software in a product, an acknowledgment in the product
      documentation would be appreciated but is not required.

   3. Altered source versions must be plainly marked as such, and must
      not be misrepresented as being the original software.

   4. The name of the author may not be used to endorse or promote
      products derived from this software without specific prior written
      permission.

   THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
   OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
   WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
   DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
   DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
   GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
   WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

   ----------------------------------------------------------------

   Notice that the above BSD-style license applies to this one file
   (valgrind.h) only.  The entire rest of Valgrind is licensed under
   the terms of the GNU General Public License, version 2.  See the
   COPYING file in the source distribution for details.

   ----------------------------------------------------------------
*/


/* This file is for inclusion into client (your!) code.

   You can use these macros to manipulate and query Valgrind's
   execution inside your own programs.

   The resulting executables will still run without Valgrind, just a
   little bit more slowly than they otherwise would, but otherwise
   unchanged.  When not running on valgrind, each client request
   consumes very few (eg. 7) instructions, so the resulting performance
   loss is negligible unless you plan to execute client requests
   millions of times per second.  Nevertheless, if that is still a
   problem, you can compile with the NVALGRIND symbol defined (gcc
   -DNVALGRIND) so that client requests are not even compiled in. */
92 #ifndef __VALGRIND_H
93 #define __VALGRIND_H
94
95
96 /* ------------------------------------------------------------------ */
97 /* VERSION NUMBER OF VALGRIND */
98 /* ------------------------------------------------------------------ */
99
100 /* Specify Valgrind's version number, so that user code can
101 conditionally compile based on our version number. Note that these
102 were introduced at version 3.6 and so do not exist in version 3.5
103 or earlier. The recommended way to use them to check for "version
104 X.Y or later" is (eg)
105
106 #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
107 && (__VALGRIND_MAJOR__ > 3 \
108 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
109 */
110 #define __VALGRIND_MAJOR__ 3
111 #define __VALGRIND_MINOR__ 10
112
113
114 #include <stdarg.h>
115
116 /* Nb: this file might be included in a file compiled with -ansi. So
117 we can't use C++ style "//" comments nor the "asm" keyword (instead
118 use "__asm__"). */
119
120 /* Derive some tags indicating what the target platform is. Note
121 that in this file we're using the compiler's CPP symbols for
122 identifying architectures, which are different to the ones we use
123 within the rest of Valgrind. Note, __powerpc__ is active for both
124 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
125 latter (on Linux, that is).
126
127 Misc note: how to find out what's predefined in gcc by default:
128 gcc -Wp,-dM somefile.c
129 */
/* Exactly one PLAT_* tag is defined for a supported target; on any
   other target NVALGRIND is forced on so no inline asm is emitted. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux


#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif (defined(__MINGW32__) && !defined(__MINGW64__)) \
      || defined(__CYGWIN32__)                        \
      || (defined(_WIN32) && defined(_M_IX86))
#  define PLAT_x86_win32 1
#elif defined(__MINGW64__) \
      || (defined(_WIN64) && defined(_M_X64))
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__)
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
/* Big Endian uses ELF version 1 */
#  define PLAT_ppc64be_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
/* Little Endian uses ELF version 2 */
#  define PLAT_ppc64le_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
#  define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips!=64)
#  define PLAT_mips32_linux 1
#else
/* If we're not compiling for our target platform, don't generate
   any inline asms.  */
#  if !defined(NVALGRIND)
#    define NVALGRIND 1
#  endif
#endif


/* ------------------------------------------------------------------ */
/* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS.  There is nothing */
/* in here of use to end-users -- skip to the next section.           */
/* ------------------------------------------------------------------ */

/*
 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
 * request. Accepts both pointers and integers as arguments.
 *
 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
 * client request that does not return a value.

 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
 * client request and whose value equals the client request result.  Accepts
 * both pointers and integers as arguments.  Note that such calls are not
 * necessarily pure functions -- they may have side effects.
 */

/* Statement form: store the request result into _zzq_rlval (or
   _zzq_default when not running under Valgrind). */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

/* Statement form for requests whose result is not needed. */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                           _zzq_arg2,  _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

219 #if defined(NVALGRIND)
220
221 /* Define NVALGRIND to completely remove the Valgrind magic sequence
222 from the compiled code (analogous to NDEBUG's effects on
223 assert()) */
224 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
225 _zzq_default, _zzq_request, \
226 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
227 (_zzq_default)
228
229 #else /* ! NVALGRIND */
230
/* The following defines the magic code sequences which the JITter
   spots and handles magically.  Don't look too closely at them as
   they will rot your brain.

   The assembly code sequences for all architectures is in this one
   file.  This is because this file must be stand-alone, and we don't
   want to have multiple files.

   For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
   value gets put in the return slot, so that everything works when
   this is executed not under Valgrind.  Args are passed in a memory
   block, and so there's no intrinsic limit to the number that could
   be passed, but it's currently five.

   The macro args are:
      _zzq_rlval    result lvalue
      _zzq_default  default value (result returned when running on real CPU)
      _zzq_request  request code
      _zzq_arg1..5  request params

   The other two macros are used to support function wrapping, and are
   a lot simpler.  VALGRIND_GET_NR_CONTEXT returns the value of the
   guest's NRADDR pseudo-register and whatever other information is
   needed to safely run the call original from the wrapper: on
   ppc64-linux, the R2 value at the divert point is also needed.  This
   information is abstracted into a user-visible type, OrigFn.

   VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
   guest, but guarantees that the branch instruction will not be
   redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
   branch-and-link-to-r11.  VALGRIND_CALL_NOREDIR is just text, not a
   complete inline asm, since it needs to be combined with more magic
   inline asm stuff to be useful.
*/

/* ------------------------- x86-{linux,darwin} ---------------- */

#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)  \
    ||  (defined(PLAT_x86_win32) && defined(__GNUC__))

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of %edi totalling 64 bits: a no-op on a real CPU, but
   a sequence the Valgrind JIT recognises as a client-request marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3,  %%edi ; roll $13, %%edi\n\t"      \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgl %%edi,%%edi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */

/* ------------------------- x86-Win32 ------------------------- */

#if defined(PLAT_x86_win32) && !defined(__GNUC__)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

/* MSVC inline-assembler form of the x86 magic preamble. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
    __asm rol edi, 3  __asm rol edi, 13                           \
    __asm rol edi, 29 __asm rol edi, 19

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default),    \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1),        \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3),           \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

/* Out-of-line helper: MSVC __asm blocks cannot appear inside a
   statement expression, so the request is made in a real function. */
static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
      volatile unsigned int __addr;                               \
      __asm { __SPECIAL_INSTRUCTION_PREAMBLE                      \
              /* %EAX = guest_NRADDR */                           \
              __asm xchg ecx,ecx                                  \
              __asm mov __addr, eax                               \
      }                                                           \
      _zzq_orig->nraddr = __addr;                                 \
    }

#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR()                                  \
    do {                                                          \
        __asm { __SPECIAL_INSTRUCTION_PREAMBLE                    \
                __asm xchg edi,edi                                \
        }                                                         \
    } while (0)

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */

/* ------------------------ amd64-{linux,darwin} --------------- */

#if defined(PLAT_amd64_linux)  ||  defined(PLAT_amd64_darwin) \
    ||  (defined(PLAT_amd64_win64) && defined(__GNUC__))

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of %rdi totalling 128 bits: a no-op on a real CPU, but
   the Valgrind JIT's amd64 client-request marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3,  %%rdi ; rolq $13, %%rdi\n\t"      \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    __extension__                                                 \
    ({ volatile unsigned long long int _zzq_args[6];              \
       volatile unsigned long long int _zzq_result;               \
       _zzq_args[0] = (unsigned long long int)(_zzq_request);     \
       _zzq_args[1] = (unsigned long long int)(_zzq_arg1);        \
       _zzq_args[2] = (unsigned long long int)(_zzq_arg2);        \
       _zzq_args[3] = (unsigned long long int)(_zzq_arg3);        \
       _zzq_args[4] = (unsigned long long int)(_zzq_arg4);        \
       _zzq_args[5] = (unsigned long long int)(_zzq_arg5);        \
       __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE            \
                        /* %RDX = client_request ( %RAX ) */      \
                        "xchgq %%rbx,%%rbx"                       \
                        : "=d" (_zzq_result)                      \
                        : "a" (&_zzq_args[0]), "0" (_zzq_default) \
                        : "cc", "memory"                          \
                       );                                         \
       _zzq_result;                                               \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
      volatile unsigned long long int __addr;                     \
      __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE             \
                       /* %RAX = guest_NRADDR */                  \
                       "xchgq %%rcx,%%rcx"                        \
                       : "=a" (__addr)                            \
                       :                                          \
                       : "cc", "memory"                           \
                      );                                          \
      _zzq_orig->nraddr = __addr;                                 \
    }

#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgq %%rdi,%%rdi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin */

/* ------------------------- amd64-Win64 ------------------------- */

#if defined(PLAT_amd64_win64) && !defined(__GNUC__)

#error Unsupported compiler.

#endif /* PLAT_amd64_win64 */

/* ------------------------ ppc32-linux ------------------------ */

#if defined(PLAT_ppc32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of r0 totalling 64 bits: the ppc32 magic marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                    "rlwinm 0,0,3,0,31  ; rlwinm 0,0,13,0,31\n\t" \
                    "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({         unsigned int  _zzq_args[6];                          \
             unsigned int  _zzq_result;                           \
             unsigned int* _zzq_ptr;                              \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc32_linux */

/* ------------------------ ppc64-linux ------------------------ */

#if defined(PLAT_ppc64be_linux)

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
      unsigned long long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

/* Four rotates of r0 totalling 128 bits: the ppc64 magic marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3  ; rotldi 0,0,13\n\t"          \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({         unsigned long long int  _zzq_args[6];                \
             unsigned long long int  _zzq_result;                 \
             unsigned long long int* _zzq_ptr;                    \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

/* nraddr AND the R2 (TOC pointer) value at the divert point are
   needed to run the original from a wrapper on ppc64be. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long long int __addr;                                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64be_linux */

#if defined(PLAT_ppc64le_linux)

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
      unsigned long long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

/* Four rotates of r0 totalling 128 bits: the ppc64 magic marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3  ; rotldi 0,0,13\n\t"          \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({         unsigned long long int  _zzq_args[6];                \
             unsigned long long int  _zzq_result;                 \
             unsigned long long int* _zzq_ptr;                    \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long long int __addr;                                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

/* ELFv2 (little-endian) uses R12, not R11, as the branch target. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R12 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64le_linux */

/* ------------------------- arm-linux ------------------------- */

#if defined(PLAT_arm_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of r12 totalling 64 bits: the ARM magic marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "mov r12, r12, ror #3  ; mov r12, r12, ror #13 \n\t"  \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned int  _zzq_args[6];                          \
    volatile unsigned int  _zzq_result;                           \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("mov r3, %1\n\t" /*default*/                 \
                     "mov r4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = client_request ( R4 ) */             \
                     "orr r10, r10, r10\n\t"                      \
                     "mov %0, r3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "r3", "r4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = guest_NRADDR */                      \
                     "orr r11, r11, r11\n\t"                      \
                     "mov %0, r3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr r9, r9, r9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm_linux */

/* ------------------------ arm64-linux ------------------------- */

#if defined(PLAT_arm64_linux)

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of x12 totalling 128 bits: the arm64 magic marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "ror x12, x12, #3  ;  ror x12, x12, #13 \n\t"         \
            "ror x12, x12, #51 ;  ror x12, x12, #61 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned long long int  _zzq_args[6];                \
    volatile unsigned long long int  _zzq_result;                 \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
    __asm__ volatile("mov x3, %1\n\t" /*default*/                 \
                     "mov x4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = client_request ( X4 ) */             \
                     "orr x10, x10, x10\n\t"                      \
                     "mov %0, x3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "x3", "x4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long long int __addr;                                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = guest_NRADDR */                      \
                     "orr x11, x11, x11\n\t"                      \
                     "mov %0, x3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "x3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir X8 */          \
                     "orr x12, x12, x12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr x9, x9, x9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm64_linux */

/* ------------------------ s390x-linux ------------------------ */

#if defined(PLAT_s390x_linux)

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;

/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code. This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_decoder.c).
 */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "lr 15,15\n\t"                               \
                     "lr 1,1\n\t"                                 \
                     "lr 2,2\n\t"                                 \
                     "lr 3,3\n\t"

#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
 ({volatile unsigned long long int _zzq_args[6];                  \
   volatile unsigned long long int _zzq_result;                   \
   _zzq_args[0] = (unsigned long long int)(_zzq_request);         \
   _zzq_args[1] = (unsigned long long int)(_zzq_arg1);            \
   _zzq_args[2] = (unsigned long long int)(_zzq_arg2);            \
   _zzq_args[3] = (unsigned long long int)(_zzq_arg3);            \
   _zzq_args[4] = (unsigned long long int)(_zzq_arg4);            \
   _zzq_args[5] = (unsigned long long int)(_zzq_arg5);            \
   __asm__ volatile(/* r2 = args */                               \
                    "lgr 2,%1\n\t"                                \
                    /* r3 = default */                            \
                    "lgr 3,%2\n\t"                                \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    __CLIENT_REQUEST_CODE                         \
                    /* results = r3 */                            \
                    "lgr %0, 3\n\t"                               \
                    : "=d" (_zzq_result)                          \
                    : "a" (&_zzq_args[0]), "0" (_zzq_default)     \
                    : "cc", "2", "3", "memory"                    \
                   );                                             \
   _zzq_result;                                                   \
 })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                    \
   volatile unsigned long long int __addr;                        \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                    __GET_NR_CONTEXT_CODE                         \
                    "lgr %0, 3\n\t"                               \
                    : "=a" (__addr)                               \
                    :                                             \
                    : "cc", "3", "memory"                         \
                   );                                             \
   _zzq_orig->nraddr = __addr;                                    \
 }

#define VALGRIND_CALL_NOREDIR_R1                                  \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     __VEX_INJECT_IR_CODE);                       \
 } while (0)

#endif /* PLAT_s390x_linux */

915 /* ------------------------- mips32-linux ---------------- */
916
917 #if defined(PLAT_mips32_linux)
918
919 typedef
920 struct {
921 unsigned int nraddr; /* where's the code? */
922 }
923 OrigFn;
924
925 /* .word 0x342
926 * .word 0x742
927 * .word 0xC2
928 * .word 0x4C2*/
929 #define __SPECIAL_INSTRUCTION_PREAMBLE \
930 "srl $0, $0, 13\n\t" \
931 "srl $0, $0, 29\n\t" \
932 "srl $0, $0, 3\n\t" \
933 "srl $0, $0, 19\n\t"
934
/* Issue a client request: packs the request code and five args into
   _zzq_args[], puts the default result in $11 (t3) and the args
   pointer in $12 (t4), then executes the magic "or $13,$13,$13"
   which Valgrind interprets as "do client request".  When not
   running under Valgrind the sequence is a no-op and $11 still
   holds _zzq_default, which becomes the expression's value.
   NOTE(review): only $11/$12 are listed as clobbered -- no "memory"
   or "cc"; confirm against upstream valgrind.h before changing. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
        __asm__ volatile("move $11, %1\n\t" /*default*/           \
                         "move $12, %2\n\t" /*ptr*/               \
                         __SPECIAL_INSTRUCTION_PREAMBLE           \
                         /* T3 = client_request ( T4 ) */         \
                         "or $13, $13, $13\n\t"                   \
                         "move %0, $11\n\t"     /*result*/        \
                         : "=r" (_zzq_result)                     \
                         : "r" (_zzq_default), "r" (&_zzq_args[0]) \
                         : "$11", "$12");                         \
    _zzq_result;                                                  \
  })
958
/* Fetch the original function's address into _zzq_rlval.nraddr.
   The magic "or $14,$14,$14" makes Valgrind place the value in $11,
   which is then copied out.  Outside Valgrind, __addr gets whatever
   $11 happened to hold. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %t9 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11"                                      \
                     );                                           \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Magic sequence: call the function whose address is in $25 (t9),
   bypassing any redirection.  Asm-string fragment, not a statement. */
#define VALGRIND_CALL_NOREDIR_T9                                 \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    /* call-noredir *%t9 */                      \
                    "or $15, $15, $15\n\t"

/* Ask the Valgrind JIT to inject IR (IR-injection test hook). */
#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     "or $11, $11, $11\n\t"                      \
                    );                                           \
 } while (0)
984
985
986 #endif /* PLAT_mips32_linux */
987
988 /* ------------------------- mips64-linux ---------------- */
989
990 #if defined(PLAT_mips64_linux)
991
/* Descriptor filled in by VALGRIND_GET_ORIG_FN: the guest address of
   the original (pre-redirection) entry point of a wrapped function. */
typedef struct {
   unsigned long long nraddr;   /* address of the original code */
} OrigFn;
997
/* dsll $0,$0, 3
 * dsll $0,$0, 13
 * dsll $0,$0, 29
 * dsll $0,$0, 19*/
/* Magic preamble (mips64 variant): four dsll shifts of the
   hardwired-zero register $0 -- architectural no-ops whose encodings
   mark the start of a Valgrind client-request sequence. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                              \
        "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"                         \
        "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
1005
/* Issue a client request (64-bit MIPS): same protocol as the mips32
   version but with 64-bit slots.  $11 carries the default/result,
   $12 the args pointer; "or $13,$13,$13" is the magic "do client
   request" marker.  Evaluates to _zzq_default when not running
   under Valgrind. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                            \
        _zzq_default, _zzq_request,                                 \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)      \
   __extension__                                                    \
   ({ volatile unsigned long long int _zzq_args[6];                 \
      volatile unsigned long long int _zzq_result;                  \
      _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
      _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
      _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
      _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
      _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
      _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
      __asm__ volatile("move $11, %1\n\t" /*default*/               \
                       "move $12, %2\n\t" /*ptr*/                   \
                       __SPECIAL_INSTRUCTION_PREAMBLE               \
                       /* $11 = client_request ( $12 ) */           \
                       "or $13, $13, $13\n\t"                       \
                       "move %0, $11\n\t"     /*result*/            \
                       : "=r" (_zzq_result)                         \
                       : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                       : "$11", "$12");                             \
      _zzq_result;                                                  \
   })
1029
/* Fetch the original function's address into _zzq_rlval.nraddr.
   "or $14,$14,$14" is the magic marker; Valgrind places the value
   in $11, which is copied into __addr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                         \
   { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                    \
     volatile unsigned long long int __addr;                        \
     __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                      /* $11 = guest_NRADDR */                      \
                      "or $14, $14, $14\n\t"                        \
                      "move %0, $11"     /*result*/                 \
                      : "=r" (__addr)                               \
                      :                                             \
                      : "$11");                                     \
     _zzq_orig->nraddr = __addr;                                    \
   }

/* Magic sequence: call the function whose address is in $25 (t9),
   bypassing any redirection.  Asm-string fragment, not a statement. */
#define VALGRIND_CALL_NOREDIR_T9                                    \
                    __SPECIAL_INSTRUCTION_PREAMBLE                  \
                    /* call-noredir $25 */                          \
                    "or $15, $15, $15\n\t"

/* Ask the Valgrind JIT to inject IR (IR-injection test hook). */
#define VALGRIND_VEX_INJECT_IR()                                    \
 do {                                                               \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                     "or $11, $11, $11\n\t"                         \
                    );                                              \
 } while (0)
1054
1055 #endif /* PLAT_mips64_linux */
1056
1057 /* Insert assembly code for other platforms here... */
1058
1059 #endif /* NVALGRIND */
1060
1061
1062 /* ------------------------------------------------------------------ */
1063 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
1064 /* ugly. It's the least-worst tradeoff I can think of. */
1065 /* ------------------------------------------------------------------ */
1066
1067 /* This section defines magic (a.k.a appalling-hack) macros for doing
1068 guaranteed-no-redirection macros, so as to get from function
1069 wrappers to the functions they are wrapping. The whole point is to
1070 construct standard call sequences, but to do the call itself with a
1071 special no-redirect call pseudo-instruction that the JIT
1072 understands and handles specially. This section is long and
1073 repetitious, and I can't see a way to make it shorter.
1074
1075 The naming scheme is as follows:
1076
1077 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
1078
1079 'W' stands for "word" and 'v' for "void". Hence there are
1080 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
1081 and for each, the possibility of returning a word-typed result, or
1082 no result.
1083 */
1084
/* Use these to write the name of your wrapper.  NOTE: duplicates
   VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h.  NOTE also: inserts
   the default behaviour equivalence class tag "0000" into the name.
   See pub_tool_redir.h for details -- normally you don't need to
   think about this, though. */
1090
1091 /* Use an extra level of macroisation so as to ensure the soname/fnname
1092 args are fully macro-expanded before pasting them together. */
/* Paste four fully-expanded tokens into one identifier. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

/* ZU/ZZ describe how the soname/fnname components are Z-encoded --
   see pub_tool_redir.h for the exact encoding rules. */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)

/* Use this macro from within a wrapper function to collect the
   context (address and possibly other info) of the original function.
   Once you have that you can then use it in one of the CALL_FN_
   macros.  The type of the argument _lval is OrigFn. */
#define VALGRIND_GET_ORIG_FN(_lval)  VALGRIND_GET_NR_CONTEXT(_lval)

/* Also provide end-user facilities for function replacement, rather
   than wrapping.  A replacement function differs from a wrapper in
   that it has no way to get hold of the original function being
   called, and hence no way to call onwards to it.  In a replacement
   function, VALGRIND_GET_ORIG_FN always returns zero. */

/* Replacement (not wrapper) name builders: note the _vgr prefix
   instead of _vgw. */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1118
1119 /* Derivatives of the main macros below, for calling functions
1120 returning void. */
1121
/* Each CALL_FN_v_* forwards to the corresponding word-returning
   CALL_FN_W_* and discards the result into a volatile dummy, so
   the call itself cannot be optimised away. */
#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_v(_junk,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_W(_junk,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1153
1154 /* ------------------------- x86-{linux,darwin} ---------------- */
1155
1156 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
1157
1158 /* These regs are trashed by the hidden call. No need to mention eax
1159 as gcc can already see that, plus causes gcc to bomb. */
/* %eax is deliberately omitted: it is already the asm output
   operand ("=a") in the CALL_FN_ macros below. */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"

/* Save %esp in %edi (declared clobbered by every CALL_FN_ macro)
   and round %esp down to a 16-byte boundary; RESTORE undoes it. */
#define VALGRIND_ALIGN_STACK \
      "movl %%esp,%%edi\n\t" \
      "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK \
      "movl %%edi,%%esp\n\t"
1172
1173 /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
1174 long) == 4. */
1175
/* Call a 0-arg function.  _argvec[0] holds the target address; the
   no-redirect call leaves the result in %eax, copied to lval. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 1 arg: pad 12 bytes + 1 push = 16, keeping %esp 16-aligned. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 2 args: pad 8 + 2 pushes = 16; args pushed right-to-left. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 3 args: pad 4 + 3 pushes = 16. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 4 args: 4 pushes = 16, no padding needed. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 5 args: pad 12 + 5 pushes = 32. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1317
/* 6 args: pad 8 + 6 pushes = 32. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 7 args: pad 4 + 7 pushes = 32. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 8 args: 8 pushes = 32, no padding needed. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 9 args: pad 12 + 9 pushes = 48. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1455
/* 10 args: pad 8 + 10 pushes = 48. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 11 args: pad 4 + 11 pushes = 48. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 12 args: 12 pushes = 48, no padding needed. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11,arg12)                    \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 48(%%eax)\n\t"                                    \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1582
1583 #endif /* PLAT_x86_linux || PLAT_x86_darwin */
1584
1585 /* ------------------------ amd64-{linux,darwin} --------------- */
1586
1587 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
1588
1589 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1590
1591 /* These regs are trashed by the hidden call. */
/* %rax is deliberately omitted: it is already the asm output
   operand ("=a") in the CALL_FN_ macros below. */
#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi",       \
                            "rdi", "r8", "r9", "r10", "r11"
1594
1595 /* This is all pretty complex. It's so as to make stack unwinding
1596 work reliably. See bug 243270. The basic problem is the sub and
1597 add of 128 of %rsp in all of the following macros. If gcc believes
1598 the CFA is in %rsp, then unwinding may fail, because what's at the
1599 CFA is not what gcc "expected" when it constructs the CFIs for the
1600 places where the macros are instantiated.
1601
1602 But we can't just add a CFI annotation to increase the CFA offset
1603 by 128, to match the sub of 128 from %rsp, because we don't know
1604 whether gcc has chosen %rsp as the CFA at that point, or whether it
1605 has chosen some other register (eg, %rbp). In the latter case,
1606 adding a CFI annotation to change the CFA offset is simply wrong.
1607
1608 So the solution is to get hold of the CFA using
1609 __builtin_dwarf_cfa(), put it in a known register, and add a
1610 CFI annotation to say what the register is. We choose %rbp for
1611 this (perhaps perversely), because:
1612
1613 (1) %rbp is already subject to unwinding. If a new register was
1614 chosen then the unwinder would have to unwind it in all stack
1615 traces, which is expensive, and
1616
1617 (2) %rbp is already subject to precise exception updates in the
1618 JIT. If a new register was chosen, we'd have to have precise
1619 exceptions for it too, which reduces performance of the
1620 generated code.
1621
1622 However .. one extra complication. We can't just whack the result
1623 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1624 list of trashed registers at the end of the inline assembly
1625 fragments; gcc won't allow %rbp to appear in that list. Hence
1626 instead we need to stash %rbp in %r15 for the duration of the asm,
1627 and say that %r15 is trashed instead. gcc seems happy to go with
1628 that.
1629
1630 Oh .. and this all needs to be conditionalised so that it is
1631 unchanged from before this commit, when compiled with older gccs
1632 that don't support __builtin_dwarf_cfa. Furthermore, since
1633 this header file is freestanding, it has to be independent of
1634 config.h, and so the following conditionalisation cannot depend on
1635 configure time checks.
1636
1637 Although it's not clear from
1638 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1639 this expression excludes Darwin.
1640 .cfi directives in Darwin assembly appear to be completely
1641 different and I haven't investigated how they work.
1642
1643 For even more entertainment value, note we have to use the
1644 completely undocumented __builtin_dwarf_cfa(), which appears to
1645 really compute the CFA, whereas __builtin_frame_address(0) claims
1646 to but actually doesn't. See
1647 https://bugs.kde.org/show_bug.cgi?id=243270#c47
1648 */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
/* Pass the caller's CFA (from __builtin_dwarf_cfa) as an extra asm
   input; the prologue stashes it in %rbp and emits .cfi directives
   so unwinders can still find the frame during the hidden call.
   %rbp itself is preserved in %r15 for the duration, since gcc
   refuses %rbp in the clobber list (see the long comment above). */
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
/* No DWARF CFI asm support: the macros collapse to nothing. */
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif
1665
1666 /* Macros to save and align the stack before making a function
1667 call and restore it afterwards as gcc may not keep the stack
1668 pointer aligned if it doesn't realise calls are being made
1669 to other functions. */
1670
/* Save %rsp in %r14 (declared clobbered by every CALL_FN_ macro)
   and round %rsp down to a 16-byte boundary; RESTORE undoes it. */
#define VALGRIND_ALIGN_STACK \
      "movq %%rsp,%%r14\n\t" \
      "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK \
      "movq %%r14,%%rsp\n\t"
1676
1677 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1678 long) == 8. */
1679
/* NB 9 Sept 07.  There is a nasty kludge here in all these CALL_FN_
   macros.  In order not to trash the stack redzone, we need to drop
   %rsp by 128 before the hidden call, and restore afterwards.  The
   nastiness is that it is only by luck that the stack still appears
   to be unwindable during the hidden call - since then the behaviour
   of any routine using this macro does not match what the CFI data
   says.  Sigh.
1687
1688 Why is this important? Imagine that a wrapper has a stack
1689 allocated local, and passes to the hidden call, a pointer to it.
1690 Because gcc does not know about the hidden call, it may allocate
1691 that local in the redzone. Unfortunately the hidden call may then
1692 trash it before it comes to use it. So we must step clear of the
1693 redzone, for the duration of the hidden call, to make it safe.
1694
1695 Probably the same problem afflicts the other redzone-style ABIs too
1696 (ppc64-linux); but for those, the stack is
1697 self describing (none of this CFI nonsense) so at least messing
1698 with the stack pointer doesn't give a danger of non-unwindable
1699 stack. */
1700
/* Call a 0-arg function.  The 128-byte drop of %rsp steps clear of
   the SysV red zone for the hidden call (see the comment above);
   result comes back in %rax. */
#define CALL_FN_W_v(lval, orig)                                        \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[1];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)

/* 1 arg: arg1 -> %rdi per the SysV AMD64 ABI. */
#define CALL_FN_W_W(lval, orig, arg1)                                  \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[2];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)

/* 2 args: args -> %rdi, %rsi. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                            \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[3];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)

/* 3 args: args -> %rdi, %rsi, %rdx. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                      \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[4];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
1796
1797 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1798 do { \
1799 volatile OrigFn _orig = (orig); \
1800 volatile unsigned long _argvec[5]; \
1801 volatile unsigned long _res; \
1802 _argvec[0] = (unsigned long)_orig.nraddr; \
1803 _argvec[1] = (unsigned long)(arg1); \
1804 _argvec[2] = (unsigned long)(arg2); \
1805 _argvec[3] = (unsigned long)(arg3); \
1806 _argvec[4] = (unsigned long)(arg4); \
1807 __asm__ volatile( \
1808 VALGRIND_CFI_PROLOGUE \
1809 VALGRIND_ALIGN_STACK \
1810 "subq $128,%%rsp\n\t" \
1811 "movq 32(%%rax), %%rcx\n\t" \
1812 "movq 24(%%rax), %%rdx\n\t" \
1813 "movq 16(%%rax), %%rsi\n\t" \
1814 "movq 8(%%rax), %%rdi\n\t" \
1815 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1816 VALGRIND_CALL_NOREDIR_RAX \
1817 VALGRIND_RESTORE_STACK \
1818 VALGRIND_CFI_EPILOGUE \
1819 : /*out*/ "=a" (_res) \
1820 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1821 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1822 ); \
1823 lval = (__typeof__(lval)) _res; \
1824 } while (0)
1825
/* 5 word args: args 1-5 go in %rdi/%rsi/%rdx/%rcx/%r8 (SysV AMD64
   ABI); still register-only, no stack args.  "subq $128" skips the
   red zone. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1856
/* 6 word args: args 1-6 fill all six SysV AMD64 integer argument
   registers %rdi/%rsi/%rdx/%rcx/%r8/%r9; this is the last variant with
   no stack-passed arguments.  "subq $128" skips the red zone. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1889
/* 7 word args: args 1-6 in registers, arg7 pushed on the stack.
   "subq $136" (red zone 128 + 8) is used instead of $128 so that after
   the single 8-byte push the stack pointer stays 16-byte aligned at
   the call, as the ABI requires. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1925
/* 8 word args: args 1-6 in registers; arg8 then arg7 are pushed
   (right-to-left) so arg7 ends up at the lower address, as the ABI
   requires.  Two 8-byte pushes keep 16-byte alignment, so the plain
   $128 red-zone skip suffices. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1963
/* 9 word args: args 1-6 in registers; args 9,8,7 pushed right-to-left.
   Three pushes (odd count) means $136 is used so the stack stays
   16-byte aligned at the call. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2003
/* 10 word args: args 1-6 in registers; args 10..7 pushed right-to-left.
   Four pushes (even count), so the plain $128 red-zone skip preserves
   16-byte alignment. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2045
/* 11 word args: args 1-6 in registers; args 11..7 pushed right-to-left.
   Five pushes (odd count), hence $136 to keep 16-byte alignment. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 88(%%rax)\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2089
/* 12 word args: args 1-6 in registers; args 12..7 pushed right-to-left.
   Six pushes (even count), so the plain $128 red-zone skip preserves
   16-byte alignment. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 96(%%rax)\n\t"                                    \
         "pushq 88(%%rax)\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2135
2136 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
2137
2138 /* ------------------------ ppc32-linux ------------------------ */
2139
2140 #if defined(PLAT_ppc32_linux)
2141
2142 /* This is useful for finding out about the on-stack stuff:
2143
2144 extern int f9 ( int,int,int,int,int,int,int,int,int );
2145 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2146 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2147 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2148
2149 int g9 ( void ) {
2150 return f9(11,22,33,44,55,66,77,88,99);
2151 }
2152 int g10 ( void ) {
2153 return f10(11,22,33,44,55,66,77,88,99,110);
2154 }
2155 int g11 ( void ) {
2156 return f11(11,22,33,44,55,66,77,88,99,110,121);
2157 }
2158 int g12 ( void ) {
2159 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2160 }
2161 */
2162
2163 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2164
2165 /* These regs are trashed by the hidden call. */
/* Clobber list for the asm blocks below: the PPC volatile
   (caller-saved) state -- lr/ctr/xer, all condition-register fields,
   and the integer registers r0, r2-r13.  Declaring these lets gcc
   keep live values elsewhere across the hidden call. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",   \
   "r11", "r12", "r13"
2171
2172 /* Macros to save and align the stack before making a function
2173 call and restore it afterwards as gcc may not keep the stack
2174 pointer aligned if it doesn't realise calls are being made
2175 to other functions. */
2176
/* Save the stack pointer (r1) in r28, then clear its low 4 bits
   (rlwinm keeps bits 0..27) so r1 is 16-byte aligned for the call.
   r28 is non-volatile but appears in the clobber lists below. */
#define VALGRIND_ALIGN_STACK                                      \
   "mr 28,1\n\t"                                                  \
   "rlwinm 1,1,0,0,27\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the saved stack pointer from r28. */
#define VALGRIND_RESTORE_STACK                                    \
   "mr 1,28\n\t"
2182
2183 /* These CALL_FN_ macros assume that on ppc32-linux,
2184 sizeof(unsigned long) == 4. */
2185
/* Call the non-redirected function in _orig with no arguments and
   assign the word result to lval.  r11 points at _argvec; the target
   address is loaded from _argvec[0] into r11 for the branch-and-link
   helper.  The result comes back in r3 ("mr %0,3"). */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2205
/* One word arg: arg1 is loaded from _argvec[1] (offset 4, 32-bit
   words) into r3 per the ppc32 ABI (see ARGREGS note above); target
   address loaded last so r11 is still the argvec base until then. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2227
/* Two word args: arg1->r3, arg2->r4 (ppc32 ABI argument registers). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2251
/* Three word args: arg1..arg3 -> r3..r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2277
/* Four word args: arg1..arg4 -> r3..r6. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2305
/* Five word args: arg1..arg5 -> r3..r7. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2335
/* Six word args: arg1..arg6 -> r3..r8. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2367
/* Seven word args: arg1..arg7 -> r3..r9 (all still in registers on
   ppc32; r10 is the last argument register, used by the 8-arg case). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2402
/* Eight word args: arg1..arg8 -> r3..r10, filling every ppc32 argument
   register; the last variant needing no stack-passed arguments. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2439
/* Nine word args: args 1-8 in r3..r10; arg9 overflows to the stack.
   "addi 1,1,-16" opens a 16-byte-aligned frame and arg9 is stored at
   8(r1), staging it through r3 before r3 is reloaded with arg1. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2482
/* Ten word args: args 1-8 in r3..r10; arg9 and arg10 go to the stack
   at 8(r1) and 12(r1) inside a 16-byte frame, staged through r3 before
   the argument registers are loaded. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2529
/* Eleven word args: args 1-8 in r3..r10; args 9-11 at 8/12/16(r1).
   The frame grows to 32 bytes here to hold the third stack arg while
   keeping 16-byte alignment. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2580
/* Twelve word args: args 1-8 in r3..r10; args 9-12 at 8/12/16/20(r1)
   within a 32-byte frame. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      _argvec[12] = (unsigned long)arg12;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg12 */                                              \
         "lwz 3,48(11)\n\t"                                       \
         "stw 3,20(1)\n\t"                                        \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2635
2636 #endif /* PLAT_ppc32_linux */
2637
2638 /* ------------------------ ppc64-linux ------------------------ */
2639
2640 #if defined(PLAT_ppc64be_linux)
2641
2642 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2643
2644 /* These regs are trashed by the hidden call. */
/* Clobber list for the ppc64be asm blocks below: lr/ctr/xer, all
   condition-register fields, and integer registers r0, r2-r13 --
   everything the hidden call may trash. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",   \
   "r11", "r12", "r13"
2650
2651 /* Macros to save and align the stack before making a function
2652 call and restore it afterwards as gcc may not keep the stack
2653 pointer aligned if it doesn't realise calls are being made
2654 to other functions. */
2655
/* Save the stack pointer (r1) in r28, then clear its low 4 bits
   (rldicr keeps bits 0..59) so r1 is 16-byte aligned for the call. */
#define VALGRIND_ALIGN_STACK                                      \
   "mr 28,1\n\t"                                                  \
   "rldicr 1,1,0,59\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the saved stack pointer from r28. */
#define VALGRIND_RESTORE_STACK                                    \
   "mr 1,28\n\t"
2661
2662 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2663 long) == 8. */
2664
/* ppc64be, no args.  _argvec is biased: %1 is &_argvec[2], so
   _argvec[0] sits at -16(r11) (scratch slot for the caller's TOC
   pointer, saved via "std 2,-16(11)"), _argvec[1] = _orig.r2 (the
   target's TOC) at -8(r11), and _argvec[2] the target address at
   0(r11).  The caller's r2 is restored after the call; result in r3. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2690
/* ppc64be, one word arg: same biased-argvec / TOC-swap scheme as
   CALL_FN_W_v; arg1 is at 8(r11) (i.e. _argvec[3]) and goes to r3. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2718
/* ppc64be, two word args: arg1->r3, arg2->r4, loaded from the biased
   argvec (8-byte slots at 8(r11), 16(r11)); TOC swapped around the
   call as in CALL_FN_W_v. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2748
/* ppc64be, three word args: arg1..arg3 -> r3..r5 from the biased
   argvec; TOC swapped around the call as in CALL_FN_W_v. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */                      \
         "ld  5, 24(11)\n\t"  /* arg3->r5 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2780
2781 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2782 do { \
2783 volatile OrigFn _orig = (orig); \
2784 volatile unsigned long _argvec[3+4]; \
2785 volatile unsigned long _res; \
2786 /* _argvec[0] holds current r2 across the call */ \
2787 _argvec[1] = (unsigned long)_orig.r2; \
2788 _argvec[2] = (unsigned long)_orig.nraddr; \
2789 _argvec[2+1] = (unsigned long)arg1; \
2790 _argvec[2+2] = (unsigned long)arg2; \
2791 _argvec[2+3] = (unsigned long)arg3; \
2792 _argvec[2+4] = (unsigned long)arg4; \
2793 __asm__ volatile( \
2794 VALGRIND_ALIGN_STACK \
2795 "mr 11,%1\n\t" \
2796 "std 2,-16(11)\n\t" /* save tocptr */ \
2797 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2798 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2799 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2800 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2801 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2802 "ld 11, 0(11)\n\t" /* target->r11 */ \
2803 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2804 "mr 11,%1\n\t" \
2805 "mr %0,3\n\t" \
2806 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2807 VALGRIND_RESTORE_STACK \
2808 : /*out*/ "=r" (_res) \
2809 : /*in*/ "r" (&_argvec[2]) \
2810 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2811 ); \
2812 lval = (__typeof__(lval)) _res; \
2813 } while (0)
2814
2815 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2816 do { \
2817 volatile OrigFn _orig = (orig); \
2818 volatile unsigned long _argvec[3+5]; \
2819 volatile unsigned long _res; \
2820 /* _argvec[0] holds current r2 across the call */ \
2821 _argvec[1] = (unsigned long)_orig.r2; \
2822 _argvec[2] = (unsigned long)_orig.nraddr; \
2823 _argvec[2+1] = (unsigned long)arg1; \
2824 _argvec[2+2] = (unsigned long)arg2; \
2825 _argvec[2+3] = (unsigned long)arg3; \
2826 _argvec[2+4] = (unsigned long)arg4; \
2827 _argvec[2+5] = (unsigned long)arg5; \
2828 __asm__ volatile( \
2829 VALGRIND_ALIGN_STACK \
2830 "mr 11,%1\n\t" \
2831 "std 2,-16(11)\n\t" /* save tocptr */ \
2832 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2833 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2834 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2835 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2836 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2837 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2838 "ld 11, 0(11)\n\t" /* target->r11 */ \
2839 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2840 "mr 11,%1\n\t" \
2841 "mr %0,3\n\t" \
2842 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2843 VALGRIND_RESTORE_STACK \
2844 : /*out*/ "=r" (_res) \
2845 : /*in*/ "r" (&_argvec[2]) \
2846 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2847 ); \
2848 lval = (__typeof__(lval)) _res; \
2849 } while (0)
2850
2851 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2852 do { \
2853 volatile OrigFn _orig = (orig); \
2854 volatile unsigned long _argvec[3+6]; \
2855 volatile unsigned long _res; \
2856 /* _argvec[0] holds current r2 across the call */ \
2857 _argvec[1] = (unsigned long)_orig.r2; \
2858 _argvec[2] = (unsigned long)_orig.nraddr; \
2859 _argvec[2+1] = (unsigned long)arg1; \
2860 _argvec[2+2] = (unsigned long)arg2; \
2861 _argvec[2+3] = (unsigned long)arg3; \
2862 _argvec[2+4] = (unsigned long)arg4; \
2863 _argvec[2+5] = (unsigned long)arg5; \
2864 _argvec[2+6] = (unsigned long)arg6; \
2865 __asm__ volatile( \
2866 VALGRIND_ALIGN_STACK \
2867 "mr 11,%1\n\t" \
2868 "std 2,-16(11)\n\t" /* save tocptr */ \
2869 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2870 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2871 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2872 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2873 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2874 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2875 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2876 "ld 11, 0(11)\n\t" /* target->r11 */ \
2877 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2878 "mr 11,%1\n\t" \
2879 "mr %0,3\n\t" \
2880 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2881 VALGRIND_RESTORE_STACK \
2882 : /*out*/ "=r" (_res) \
2883 : /*in*/ "r" (&_argvec[2]) \
2884 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2885 ); \
2886 lval = (__typeof__(lval)) _res; \
2887 } while (0)
2888
2889 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2890 arg7) \
2891 do { \
2892 volatile OrigFn _orig = (orig); \
2893 volatile unsigned long _argvec[3+7]; \
2894 volatile unsigned long _res; \
2895 /* _argvec[0] holds current r2 across the call */ \
2896 _argvec[1] = (unsigned long)_orig.r2; \
2897 _argvec[2] = (unsigned long)_orig.nraddr; \
2898 _argvec[2+1] = (unsigned long)arg1; \
2899 _argvec[2+2] = (unsigned long)arg2; \
2900 _argvec[2+3] = (unsigned long)arg3; \
2901 _argvec[2+4] = (unsigned long)arg4; \
2902 _argvec[2+5] = (unsigned long)arg5; \
2903 _argvec[2+6] = (unsigned long)arg6; \
2904 _argvec[2+7] = (unsigned long)arg7; \
2905 __asm__ volatile( \
2906 VALGRIND_ALIGN_STACK \
2907 "mr 11,%1\n\t" \
2908 "std 2,-16(11)\n\t" /* save tocptr */ \
2909 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2910 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2911 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2912 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2913 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2914 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2915 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2916 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2917 "ld 11, 0(11)\n\t" /* target->r11 */ \
2918 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2919 "mr 11,%1\n\t" \
2920 "mr %0,3\n\t" \
2921 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2922 VALGRIND_RESTORE_STACK \
2923 : /*out*/ "=r" (_res) \
2924 : /*in*/ "r" (&_argvec[2]) \
2925 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2926 ); \
2927 lval = (__typeof__(lval)) _res; \
2928 } while (0)
2929
2930 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2931 arg7,arg8) \
2932 do { \
2933 volatile OrigFn _orig = (orig); \
2934 volatile unsigned long _argvec[3+8]; \
2935 volatile unsigned long _res; \
2936 /* _argvec[0] holds current r2 across the call */ \
2937 _argvec[1] = (unsigned long)_orig.r2; \
2938 _argvec[2] = (unsigned long)_orig.nraddr; \
2939 _argvec[2+1] = (unsigned long)arg1; \
2940 _argvec[2+2] = (unsigned long)arg2; \
2941 _argvec[2+3] = (unsigned long)arg3; \
2942 _argvec[2+4] = (unsigned long)arg4; \
2943 _argvec[2+5] = (unsigned long)arg5; \
2944 _argvec[2+6] = (unsigned long)arg6; \
2945 _argvec[2+7] = (unsigned long)arg7; \
2946 _argvec[2+8] = (unsigned long)arg8; \
2947 __asm__ volatile( \
2948 VALGRIND_ALIGN_STACK \
2949 "mr 11,%1\n\t" \
2950 "std 2,-16(11)\n\t" /* save tocptr */ \
2951 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2952 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2953 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2954 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2955 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2956 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2957 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2958 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2959 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2960 "ld 11, 0(11)\n\t" /* target->r11 */ \
2961 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2962 "mr 11,%1\n\t" \
2963 "mr %0,3\n\t" \
2964 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2965 VALGRIND_RESTORE_STACK \
2966 : /*out*/ "=r" (_res) \
2967 : /*in*/ "r" (&_argvec[2]) \
2968 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2969 ); \
2970 lval = (__typeof__(lval)) _res; \
2971 } while (0)
2972
2973 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2974 arg7,arg8,arg9) \
2975 do { \
2976 volatile OrigFn _orig = (orig); \
2977 volatile unsigned long _argvec[3+9]; \
2978 volatile unsigned long _res; \
2979 /* _argvec[0] holds current r2 across the call */ \
2980 _argvec[1] = (unsigned long)_orig.r2; \
2981 _argvec[2] = (unsigned long)_orig.nraddr; \
2982 _argvec[2+1] = (unsigned long)arg1; \
2983 _argvec[2+2] = (unsigned long)arg2; \
2984 _argvec[2+3] = (unsigned long)arg3; \
2985 _argvec[2+4] = (unsigned long)arg4; \
2986 _argvec[2+5] = (unsigned long)arg5; \
2987 _argvec[2+6] = (unsigned long)arg6; \
2988 _argvec[2+7] = (unsigned long)arg7; \
2989 _argvec[2+8] = (unsigned long)arg8; \
2990 _argvec[2+9] = (unsigned long)arg9; \
2991 __asm__ volatile( \
2992 VALGRIND_ALIGN_STACK \
2993 "mr 11,%1\n\t" \
2994 "std 2,-16(11)\n\t" /* save tocptr */ \
2995 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2996 "addi 1,1,-128\n\t" /* expand stack frame */ \
2997 /* arg9 */ \
2998 "ld 3,72(11)\n\t" \
2999 "std 3,112(1)\n\t" \
3000 /* args1-8 */ \
3001 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3002 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3003 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3004 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3005 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3006 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3007 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3008 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3009 "ld 11, 0(11)\n\t" /* target->r11 */ \
3010 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3011 "mr 11,%1\n\t" \
3012 "mr %0,3\n\t" \
3013 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3014 VALGRIND_RESTORE_STACK \
3015 : /*out*/ "=r" (_res) \
3016 : /*in*/ "r" (&_argvec[2]) \
3017 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3018 ); \
3019 lval = (__typeof__(lval)) _res; \
3020 } while (0)
3021
3022 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3023 arg7,arg8,arg9,arg10) \
3024 do { \
3025 volatile OrigFn _orig = (orig); \
3026 volatile unsigned long _argvec[3+10]; \
3027 volatile unsigned long _res; \
3028 /* _argvec[0] holds current r2 across the call */ \
3029 _argvec[1] = (unsigned long)_orig.r2; \
3030 _argvec[2] = (unsigned long)_orig.nraddr; \
3031 _argvec[2+1] = (unsigned long)arg1; \
3032 _argvec[2+2] = (unsigned long)arg2; \
3033 _argvec[2+3] = (unsigned long)arg3; \
3034 _argvec[2+4] = (unsigned long)arg4; \
3035 _argvec[2+5] = (unsigned long)arg5; \
3036 _argvec[2+6] = (unsigned long)arg6; \
3037 _argvec[2+7] = (unsigned long)arg7; \
3038 _argvec[2+8] = (unsigned long)arg8; \
3039 _argvec[2+9] = (unsigned long)arg9; \
3040 _argvec[2+10] = (unsigned long)arg10; \
3041 __asm__ volatile( \
3042 VALGRIND_ALIGN_STACK \
3043 "mr 11,%1\n\t" \
3044 "std 2,-16(11)\n\t" /* save tocptr */ \
3045 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3046 "addi 1,1,-128\n\t" /* expand stack frame */ \
3047 /* arg10 */ \
3048 "ld 3,80(11)\n\t" \
3049 "std 3,120(1)\n\t" \
3050 /* arg9 */ \
3051 "ld 3,72(11)\n\t" \
3052 "std 3,112(1)\n\t" \
3053 /* args1-8 */ \
3054 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3055 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3056 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3057 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3058 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3059 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3060 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3061 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3062 "ld 11, 0(11)\n\t" /* target->r11 */ \
3063 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3064 "mr 11,%1\n\t" \
3065 "mr %0,3\n\t" \
3066 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3067 VALGRIND_RESTORE_STACK \
3068 : /*out*/ "=r" (_res) \
3069 : /*in*/ "r" (&_argvec[2]) \
3070 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3071 ); \
3072 lval = (__typeof__(lval)) _res; \
3073 } while (0)
3074
3075 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3076 arg7,arg8,arg9,arg10,arg11) \
3077 do { \
3078 volatile OrigFn _orig = (orig); \
3079 volatile unsigned long _argvec[3+11]; \
3080 volatile unsigned long _res; \
3081 /* _argvec[0] holds current r2 across the call */ \
3082 _argvec[1] = (unsigned long)_orig.r2; \
3083 _argvec[2] = (unsigned long)_orig.nraddr; \
3084 _argvec[2+1] = (unsigned long)arg1; \
3085 _argvec[2+2] = (unsigned long)arg2; \
3086 _argvec[2+3] = (unsigned long)arg3; \
3087 _argvec[2+4] = (unsigned long)arg4; \
3088 _argvec[2+5] = (unsigned long)arg5; \
3089 _argvec[2+6] = (unsigned long)arg6; \
3090 _argvec[2+7] = (unsigned long)arg7; \
3091 _argvec[2+8] = (unsigned long)arg8; \
3092 _argvec[2+9] = (unsigned long)arg9; \
3093 _argvec[2+10] = (unsigned long)arg10; \
3094 _argvec[2+11] = (unsigned long)arg11; \
3095 __asm__ volatile( \
3096 VALGRIND_ALIGN_STACK \
3097 "mr 11,%1\n\t" \
3098 "std 2,-16(11)\n\t" /* save tocptr */ \
3099 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3100 "addi 1,1,-144\n\t" /* expand stack frame */ \
3101 /* arg11 */ \
3102 "ld 3,88(11)\n\t" \
3103 "std 3,128(1)\n\t" \
3104 /* arg10 */ \
3105 "ld 3,80(11)\n\t" \
3106 "std 3,120(1)\n\t" \
3107 /* arg9 */ \
3108 "ld 3,72(11)\n\t" \
3109 "std 3,112(1)\n\t" \
3110 /* args1-8 */ \
3111 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3112 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3113 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3114 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3115 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3116 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3117 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3118 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3119 "ld 11, 0(11)\n\t" /* target->r11 */ \
3120 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3121 "mr 11,%1\n\t" \
3122 "mr %0,3\n\t" \
3123 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3124 VALGRIND_RESTORE_STACK \
3125 : /*out*/ "=r" (_res) \
3126 : /*in*/ "r" (&_argvec[2]) \
3127 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3128 ); \
3129 lval = (__typeof__(lval)) _res; \
3130 } while (0)
3131
3132 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3133 arg7,arg8,arg9,arg10,arg11,arg12) \
3134 do { \
3135 volatile OrigFn _orig = (orig); \
3136 volatile unsigned long _argvec[3+12]; \
3137 volatile unsigned long _res; \
3138 /* _argvec[0] holds current r2 across the call */ \
3139 _argvec[1] = (unsigned long)_orig.r2; \
3140 _argvec[2] = (unsigned long)_orig.nraddr; \
3141 _argvec[2+1] = (unsigned long)arg1; \
3142 _argvec[2+2] = (unsigned long)arg2; \
3143 _argvec[2+3] = (unsigned long)arg3; \
3144 _argvec[2+4] = (unsigned long)arg4; \
3145 _argvec[2+5] = (unsigned long)arg5; \
3146 _argvec[2+6] = (unsigned long)arg6; \
3147 _argvec[2+7] = (unsigned long)arg7; \
3148 _argvec[2+8] = (unsigned long)arg8; \
3149 _argvec[2+9] = (unsigned long)arg9; \
3150 _argvec[2+10] = (unsigned long)arg10; \
3151 _argvec[2+11] = (unsigned long)arg11; \
3152 _argvec[2+12] = (unsigned long)arg12; \
3153 __asm__ volatile( \
3154 VALGRIND_ALIGN_STACK \
3155 "mr 11,%1\n\t" \
3156 "std 2,-16(11)\n\t" /* save tocptr */ \
3157 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3158 "addi 1,1,-144\n\t" /* expand stack frame */ \
3159 /* arg12 */ \
3160 "ld 3,96(11)\n\t" \
3161 "std 3,136(1)\n\t" \
3162 /* arg11 */ \
3163 "ld 3,88(11)\n\t" \
3164 "std 3,128(1)\n\t" \
3165 /* arg10 */ \
3166 "ld 3,80(11)\n\t" \
3167 "std 3,120(1)\n\t" \
3168 /* arg9 */ \
3169 "ld 3,72(11)\n\t" \
3170 "std 3,112(1)\n\t" \
3171 /* args1-8 */ \
3172 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3173 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3174 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3175 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3176 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3177 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3178 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3179 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3180 "ld 11, 0(11)\n\t" /* target->r11 */ \
3181 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3182 "mr 11,%1\n\t" \
3183 "mr %0,3\n\t" \
3184 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3185 VALGRIND_RESTORE_STACK \
3186 : /*out*/ "=r" (_res) \
3187 : /*in*/ "r" (&_argvec[2]) \
3188 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3189 ); \
3190 lval = (__typeof__(lval)) _res; \
3191 } while (0)
3192
3193 #endif /* PLAT_ppc64be_linux */
3194
3195 /* ------------------------- ppc64le-linux ----------------------- */
3196 #if defined(PLAT_ppc64le_linux)
3197
3198 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
3199
3200 /* These regs are trashed by the hidden call. */
3201 #define __CALLER_SAVED_REGS \
3202 "lr", "ctr", "xer", \
3203 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
3204 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
3205 "r11", "r12", "r13"
3206
3207 /* Macros to save and align the stack before making a function
3208 call and restore it afterwards as gcc may not keep the stack
3209 pointer aligned if it doesn't realise calls are being made
3210 to other functions. */
3211
3212 #define VALGRIND_ALIGN_STACK \
3213 "mr 28,1\n\t" \
3214 "rldicr 1,1,0,59\n\t"
3215 #define VALGRIND_RESTORE_STACK \
3216 "mr 1,28\n\t"
3217
3218 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
3219 long) == 8. */
3220
3221 #define CALL_FN_W_v(lval, orig) \
3222 do { \
3223 volatile OrigFn _orig = (orig); \
3224 volatile unsigned long _argvec[3+0]; \
3225 volatile unsigned long _res; \
3226 /* _argvec[0] holds current r2 across the call */ \
3227 _argvec[1] = (unsigned long)_orig.r2; \
3228 _argvec[2] = (unsigned long)_orig.nraddr; \
3229 __asm__ volatile( \
3230 VALGRIND_ALIGN_STACK \
3231 "mr 12,%1\n\t" \
3232 "std 2,-16(12)\n\t" /* save tocptr */ \
3233 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3234 "ld 12, 0(12)\n\t" /* target->r12 */ \
3235 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3236 "mr 12,%1\n\t" \
3237 "mr %0,3\n\t" \
3238 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3239 VALGRIND_RESTORE_STACK \
3240 : /*out*/ "=r" (_res) \
3241 : /*in*/ "r" (&_argvec[2]) \
3242 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3243 ); \
3244 lval = (__typeof__(lval)) _res; \
3245 } while (0)
3246
3247 #define CALL_FN_W_W(lval, orig, arg1) \
3248 do { \
3249 volatile OrigFn _orig = (orig); \
3250 volatile unsigned long _argvec[3+1]; \
3251 volatile unsigned long _res; \
3252 /* _argvec[0] holds current r2 across the call */ \
3253 _argvec[1] = (unsigned long)_orig.r2; \
3254 _argvec[2] = (unsigned long)_orig.nraddr; \
3255 _argvec[2+1] = (unsigned long)arg1; \
3256 __asm__ volatile( \
3257 VALGRIND_ALIGN_STACK \
3258 "mr 12,%1\n\t" \
3259 "std 2,-16(12)\n\t" /* save tocptr */ \
3260 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3261 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3262 "ld 12, 0(12)\n\t" /* target->r12 */ \
3263 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3264 "mr 12,%1\n\t" \
3265 "mr %0,3\n\t" \
3266 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3267 VALGRIND_RESTORE_STACK \
3268 : /*out*/ "=r" (_res) \
3269 : /*in*/ "r" (&_argvec[2]) \
3270 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3271 ); \
3272 lval = (__typeof__(lval)) _res; \
3273 } while (0)
3274
3275 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3276 do { \
3277 volatile OrigFn _orig = (orig); \
3278 volatile unsigned long _argvec[3+2]; \
3279 volatile unsigned long _res; \
3280 /* _argvec[0] holds current r2 across the call */ \
3281 _argvec[1] = (unsigned long)_orig.r2; \
3282 _argvec[2] = (unsigned long)_orig.nraddr; \
3283 _argvec[2+1] = (unsigned long)arg1; \
3284 _argvec[2+2] = (unsigned long)arg2; \
3285 __asm__ volatile( \
3286 VALGRIND_ALIGN_STACK \
3287 "mr 12,%1\n\t" \
3288 "std 2,-16(12)\n\t" /* save tocptr */ \
3289 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3290 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3291 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3292 "ld 12, 0(12)\n\t" /* target->r12 */ \
3293 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3294 "mr 12,%1\n\t" \
3295 "mr %0,3\n\t" \
3296 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3297 VALGRIND_RESTORE_STACK \
3298 : /*out*/ "=r" (_res) \
3299 : /*in*/ "r" (&_argvec[2]) \
3300 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3301 ); \
3302 lval = (__typeof__(lval)) _res; \
3303 } while (0)
3304
3305 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3306 do { \
3307 volatile OrigFn _orig = (orig); \
3308 volatile unsigned long _argvec[3+3]; \
3309 volatile unsigned long _res; \
3310 /* _argvec[0] holds current r2 across the call */ \
3311 _argvec[1] = (unsigned long)_orig.r2; \
3312 _argvec[2] = (unsigned long)_orig.nraddr; \
3313 _argvec[2+1] = (unsigned long)arg1; \
3314 _argvec[2+2] = (unsigned long)arg2; \
3315 _argvec[2+3] = (unsigned long)arg3; \
3316 __asm__ volatile( \
3317 VALGRIND_ALIGN_STACK \
3318 "mr 12,%1\n\t" \
3319 "std 2,-16(12)\n\t" /* save tocptr */ \
3320 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3321 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3322 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3323 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3324 "ld 12, 0(12)\n\t" /* target->r12 */ \
3325 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3326 "mr 12,%1\n\t" \
3327 "mr %0,3\n\t" \
3328 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3329 VALGRIND_RESTORE_STACK \
3330 : /*out*/ "=r" (_res) \
3331 : /*in*/ "r" (&_argvec[2]) \
3332 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3333 ); \
3334 lval = (__typeof__(lval)) _res; \
3335 } while (0)
3336
3337 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3338 do { \
3339 volatile OrigFn _orig = (orig); \
3340 volatile unsigned long _argvec[3+4]; \
3341 volatile unsigned long _res; \
3342 /* _argvec[0] holds current r2 across the call */ \
3343 _argvec[1] = (unsigned long)_orig.r2; \
3344 _argvec[2] = (unsigned long)_orig.nraddr; \
3345 _argvec[2+1] = (unsigned long)arg1; \
3346 _argvec[2+2] = (unsigned long)arg2; \
3347 _argvec[2+3] = (unsigned long)arg3; \
3348 _argvec[2+4] = (unsigned long)arg4; \
3349 __asm__ volatile( \
3350 VALGRIND_ALIGN_STACK \
3351 "mr 12,%1\n\t" \
3352 "std 2,-16(12)\n\t" /* save tocptr */ \
3353 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3354 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3355 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3356 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3357 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3358 "ld 12, 0(12)\n\t" /* target->r12 */ \
3359 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3360 "mr 12,%1\n\t" \
3361 "mr %0,3\n\t" \
3362 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3363 VALGRIND_RESTORE_STACK \
3364 : /*out*/ "=r" (_res) \
3365 : /*in*/ "r" (&_argvec[2]) \
3366 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3367 ); \
3368 lval = (__typeof__(lval)) _res; \
3369 } while (0)
3370
3371 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3372 do { \
3373 volatile OrigFn _orig = (orig); \
3374 volatile unsigned long _argvec[3+5]; \
3375 volatile unsigned long _res; \
3376 /* _argvec[0] holds current r2 across the call */ \
3377 _argvec[1] = (unsigned long)_orig.r2; \
3378 _argvec[2] = (unsigned long)_orig.nraddr; \
3379 _argvec[2+1] = (unsigned long)arg1; \
3380 _argvec[2+2] = (unsigned long)arg2; \
3381 _argvec[2+3] = (unsigned long)arg3; \
3382 _argvec[2+4] = (unsigned long)arg4; \
3383 _argvec[2+5] = (unsigned long)arg5; \
3384 __asm__ volatile( \
3385 VALGRIND_ALIGN_STACK \
3386 "mr 12,%1\n\t" \
3387 "std 2,-16(12)\n\t" /* save tocptr */ \
3388 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3389 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3390 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3391 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3392 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3393 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3394 "ld 12, 0(12)\n\t" /* target->r12 */ \
3395 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3396 "mr 12,%1\n\t" \
3397 "mr %0,3\n\t" \
3398 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3399 VALGRIND_RESTORE_STACK \
3400 : /*out*/ "=r" (_res) \
3401 : /*in*/ "r" (&_argvec[2]) \
3402 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3403 ); \
3404 lval = (__typeof__(lval)) _res; \
3405 } while (0)
3406
3407 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3408 do { \
3409 volatile OrigFn _orig = (orig); \
3410 volatile unsigned long _argvec[3+6]; \
3411 volatile unsigned long _res; \
3412 /* _argvec[0] holds current r2 across the call */ \
3413 _argvec[1] = (unsigned long)_orig.r2; \
3414 _argvec[2] = (unsigned long)_orig.nraddr; \
3415 _argvec[2+1] = (unsigned long)arg1; \
3416 _argvec[2+2] = (unsigned long)arg2; \
3417 _argvec[2+3] = (unsigned long)arg3; \
3418 _argvec[2+4] = (unsigned long)arg4; \
3419 _argvec[2+5] = (unsigned long)arg5; \
3420 _argvec[2+6] = (unsigned long)arg6; \
3421 __asm__ volatile( \
3422 VALGRIND_ALIGN_STACK \
3423 "mr 12,%1\n\t" \
3424 "std 2,-16(12)\n\t" /* save tocptr */ \
3425 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3426 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3427 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3428 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3429 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3430 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3431 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3432 "ld 12, 0(12)\n\t" /* target->r12 */ \
3433 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3434 "mr 12,%1\n\t" \
3435 "mr %0,3\n\t" \
3436 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3437 VALGRIND_RESTORE_STACK \
3438 : /*out*/ "=r" (_res) \
3439 : /*in*/ "r" (&_argvec[2]) \
3440 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3441 ); \
3442 lval = (__typeof__(lval)) _res; \
3443 } while (0)
3444
3445 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3446 arg7) \
3447 do { \
3448 volatile OrigFn _orig = (orig); \
3449 volatile unsigned long _argvec[3+7]; \
3450 volatile unsigned long _res; \
3451 /* _argvec[0] holds current r2 across the call */ \
3452 _argvec[1] = (unsigned long)_orig.r2; \
3453 _argvec[2] = (unsigned long)_orig.nraddr; \
3454 _argvec[2+1] = (unsigned long)arg1; \
3455 _argvec[2+2] = (unsigned long)arg2; \
3456 _argvec[2+3] = (unsigned long)arg3; \
3457 _argvec[2+4] = (unsigned long)arg4; \
3458 _argvec[2+5] = (unsigned long)arg5; \
3459 _argvec[2+6] = (unsigned long)arg6; \
3460 _argvec[2+7] = (unsigned long)arg7; \
3461 __asm__ volatile( \
3462 VALGRIND_ALIGN_STACK \
3463 "mr 12,%1\n\t" \
3464 "std 2,-16(12)\n\t" /* save tocptr */ \
3465 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3466 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3467 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3468 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3469 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3470 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3471 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3472 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3473 "ld 12, 0(12)\n\t" /* target->r12 */ \
3474 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3475 "mr 12,%1\n\t" \
3476 "mr %0,3\n\t" \
3477 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3478 VALGRIND_RESTORE_STACK \
3479 : /*out*/ "=r" (_res) \
3480 : /*in*/ "r" (&_argvec[2]) \
3481 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3482 ); \
3483 lval = (__typeof__(lval)) _res; \
3484 } while (0)
3485
3486 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3487 arg7,arg8) \
3488 do { \
3489 volatile OrigFn _orig = (orig); \
3490 volatile unsigned long _argvec[3+8]; \
3491 volatile unsigned long _res; \
3492 /* _argvec[0] holds current r2 across the call */ \
3493 _argvec[1] = (unsigned long)_orig.r2; \
3494 _argvec[2] = (unsigned long)_orig.nraddr; \
3495 _argvec[2+1] = (unsigned long)arg1; \
3496 _argvec[2+2] = (unsigned long)arg2; \
3497 _argvec[2+3] = (unsigned long)arg3; \
3498 _argvec[2+4] = (unsigned long)arg4; \
3499 _argvec[2+5] = (unsigned long)arg5; \
3500 _argvec[2+6] = (unsigned long)arg6; \
3501 _argvec[2+7] = (unsigned long)arg7; \
3502 _argvec[2+8] = (unsigned long)arg8; \
3503 __asm__ volatile( \
3504 VALGRIND_ALIGN_STACK \
3505 "mr 12,%1\n\t" \
3506 "std 2,-16(12)\n\t" /* save tocptr */ \
3507 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3508 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3509 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3510 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3511 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3512 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3513 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3514 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3515 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3516 "ld 12, 0(12)\n\t" /* target->r12 */ \
3517 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3518 "mr 12,%1\n\t" \
3519 "mr %0,3\n\t" \
3520 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3521 VALGRIND_RESTORE_STACK \
3522 : /*out*/ "=r" (_res) \
3523 : /*in*/ "r" (&_argvec[2]) \
3524 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3525 ); \
3526 lval = (__typeof__(lval)) _res; \
3527 } while (0)
3528
3529 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3530 arg7,arg8,arg9) \
3531 do { \
3532 volatile OrigFn _orig = (orig); \
3533 volatile unsigned long _argvec[3+9]; \
3534 volatile unsigned long _res; \
3535 /* _argvec[0] holds current r2 across the call */ \
3536 _argvec[1] = (unsigned long)_orig.r2; \
3537 _argvec[2] = (unsigned long)_orig.nraddr; \
3538 _argvec[2+1] = (unsigned long)arg1; \
3539 _argvec[2+2] = (unsigned long)arg2; \
3540 _argvec[2+3] = (unsigned long)arg3; \
3541 _argvec[2+4] = (unsigned long)arg4; \
3542 _argvec[2+5] = (unsigned long)arg5; \
3543 _argvec[2+6] = (unsigned long)arg6; \
3544 _argvec[2+7] = (unsigned long)arg7; \
3545 _argvec[2+8] = (unsigned long)arg8; \
3546 _argvec[2+9] = (unsigned long)arg9; \
3547 __asm__ volatile( \
3548 VALGRIND_ALIGN_STACK \
3549 "mr 12,%1\n\t" \
3550 "std 2,-16(12)\n\t" /* save tocptr */ \
3551 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3552 "addi 1,1,-128\n\t" /* expand stack frame */ \
3553 /* arg9 */ \
3554 "ld 3,72(12)\n\t" \
3555 "std 3,96(1)\n\t" \
3556 /* args1-8 */ \
3557 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3558 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3559 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3560 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3561 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3562 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3563 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3564 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3565 "ld 12, 0(12)\n\t" /* target->r12 */ \
3566 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3567 "mr 12,%1\n\t" \
3568 "mr %0,3\n\t" \
3569 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3570 VALGRIND_RESTORE_STACK \
3571 : /*out*/ "=r" (_res) \
3572 : /*in*/ "r" (&_argvec[2]) \
3573 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3574 ); \
3575 lval = (__typeof__(lval)) _res; \
3576 } while (0)
3577
3578 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3579 arg7,arg8,arg9,arg10) \
3580 do { \
3581 volatile OrigFn _orig = (orig); \
3582 volatile unsigned long _argvec[3+10]; \
3583 volatile unsigned long _res; \
3584 /* _argvec[0] holds current r2 across the call */ \
3585 _argvec[1] = (unsigned long)_orig.r2; \
3586 _argvec[2] = (unsigned long)_orig.nraddr; \
3587 _argvec[2+1] = (unsigned long)arg1; \
3588 _argvec[2+2] = (unsigned long)arg2; \
3589 _argvec[2+3] = (unsigned long)arg3; \
3590 _argvec[2+4] = (unsigned long)arg4; \
3591 _argvec[2+5] = (unsigned long)arg5; \
3592 _argvec[2+6] = (unsigned long)arg6; \
3593 _argvec[2+7] = (unsigned long)arg7; \
3594 _argvec[2+8] = (unsigned long)arg8; \
3595 _argvec[2+9] = (unsigned long)arg9; \
3596 _argvec[2+10] = (unsigned long)arg10; \
3597 __asm__ volatile( \
3598 VALGRIND_ALIGN_STACK \
3599 "mr 12,%1\n\t" \
3600 "std 2,-16(12)\n\t" /* save tocptr */ \
3601 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3602 "addi 1,1,-128\n\t" /* expand stack frame */ \
3603 /* arg10 */ \
3604 "ld 3,80(12)\n\t" \
3605 "std 3,104(1)\n\t" \
3606 /* arg9 */ \
3607 "ld 3,72(12)\n\t" \
3608 "std 3,96(1)\n\t" \
3609 /* args1-8 */ \
3610 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3611 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3612 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3613 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3614 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3615 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3616 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3617 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3618 "ld 12, 0(12)\n\t" /* target->r12 */ \
3619 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3620 "mr 12,%1\n\t" \
3621 "mr %0,3\n\t" \
3622 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3623 VALGRIND_RESTORE_STACK \
3624 : /*out*/ "=r" (_res) \
3625 : /*in*/ "r" (&_argvec[2]) \
3626 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3627 ); \
3628 lval = (__typeof__(lval)) _res; \
3629 } while (0)
3630
/* Call an original (non-redirected) 11-word-arg function on
   ppc64le.  Same scheme as CALL_FN_W_10W: args 1..8 in r3..r10,
   args 9..11 spilled into the 144-byte frame extension; the
   caller's r2 (TOC) is saved in _argvec[0] and restored after. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg11 */                                              \
         "ld 3,88(12)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t"  /* arg1->r3 */                        \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3687
/* Call an original (non-redirected) 12-word-arg function on
   ppc64le.  Args 1..8 in r3..r10; args 9..12 go at offsets
   96..120 of the 144-byte frame extension (120+8 <= 144, so the
   frame is large enough).  Caller's TOC (r2) is saved in
   _argvec[0] and restored after the call. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+12];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      _argvec[2+12] = (unsigned long)arg12;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg12 */                                              \
         "ld 3,96(12)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg11 */                                              \
         "ld 3,88(12)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t"  /* arg1->r3 */                        \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3748
3749 #endif /* PLAT_ppc64le_linux */
3750
3751 /* ------------------------- arm-linux ------------------------- */
3752
3753 #if defined(PLAT_arm_linux)
3754
/* These regs are trashed by the hidden call: the four argument
   registers r0-r3, the scratch r4 used below, and lr (r14). */
#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4","r14"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* This is a bit tricky.  We store the original stack pointer in r10
   as it is callee-saves.  gcc doesn't allow the use of r11 for some
   reason.  Also, we can't directly "bic" the stack pointer in thumb
   mode since r13 isn't an allowed register number in that context.
   So use r4 as a temporary, since that is about to get trashed
   anyway, just after each use of this macro.  Side effect is we need
   to be very careful about any future changes, since
   VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
/* Save sp in r10, then round sp down to an 8-byte boundary via r4. */
#define VALGRIND_ALIGN_STACK               \
      "mov r10, sp\n\t"                    \
      "mov r4,  sp\n\t"                    \
      "bic r4,  r4, #7\n\t"                \
      "mov sp,  r4\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the sp saved in r10. */
#define VALGRIND_RESTORE_STACK             \
      "mov sp,  r10\n\t"
3778
3779 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3780 long) == 4. */
3781
/* Call an original (non-redirected) zero-arg function on arm-linux;
   result (r0) is assigned to lval.  Target address is loaded into r4
   for the branch-and-link-to-noredir sequence. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3800
/* Call an original 1-word-arg function on arm-linux: arg1 -> r0,
   result taken from r0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3821
/* Call an original 2-word-arg function on arm-linux: args -> r0,r1. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3844
/* Call an original 3-word-arg function on arm-linux: args -> r0-r2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3869
/* Call an original 4-word-arg function on arm-linux: args -> r0-r3
   (all four AAPCS argument registers; nothing on the stack). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3896
/* Call an original 5-word-arg function on arm-linux: args 1-4 in
   r0-r3, arg5 on the stack.  The "sub sp, #4" plus the single push
   moves sp by 8 bytes total, keeping it 8-byte aligned. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3927
/* Call an original 6-word-arg function on arm-linux: args 1-4 in
   r0-r3, args 5-6 pushed (8 bytes, alignment preserved). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3959
/* Call an original 7-word-arg function on arm-linux: args 1-4 in
   r0-r3, args 5-7 on the stack ("sub #4" + 3-reg push = 16 bytes,
   alignment preserved). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3995
/* Call an original 8-word-arg function on arm-linux: args 1-4 in
   r0-r3, args 5-8 pushed (16 bytes, alignment preserved). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "push {r0, r1, r2, r3} \n\t"                             \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4032
/* Call an original 9-word-arg function on arm-linux: args 1-4 in
   r0-r3, args 5-9 on the stack ("sub #4" + 5-reg push = 24 bytes,
   alignment preserved). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4072
/* Call an original 10-word-arg function on arm-linux: args 1-4 in
   r0-r3, args 5-10 on the stack.  Two pushes: arg10 first, then
   args 5-9, so the stack ends up in ascending-address order
   (24 bytes total, alignment preserved). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4114
/* Call an original 11-word-arg function on arm-linux: args 1-4 in
   r0-r3, args 5-11 on the stack ("sub #4" + pushes = 32 bytes,
   alignment preserved; args 10-11 pushed first so the stack is in
   ascending-address order). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4160
/* Call an original 12-word-arg function on arm-linux: args 1-4 in
   r0-r3, args 5-12 on the stack (two pushes totalling 32 bytes,
   alignment preserved; args 10-12 pushed first so the stack is in
   ascending-address order). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11,arg12)                    \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "ldr r2, [%1, #48] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4207
4208 #endif /* PLAT_arm_linux */
4209
4210 /* ------------------------ arm64-linux ------------------------ */
4211
4212 #if defined(PLAT_arm64_linux)
4213
/* These regs are trashed by the hidden call: the argument/result
   registers x0-x7, the indirect-result/scratch x8, the temporaries
   x9-x18, x19-x20, lr (x30), and all SIMD/FP registers v0-v31. */
#define __CALLER_SAVED_REGS \
     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9",   \
     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17",      \
     "x18", "x19", "x20", "x30",                                  \
     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",  \
     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17",      \
     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25",      \
     "v26", "v27", "v28", "v29", "v30", "v31"

/* x21 is callee-saved, so we can use it to save and restore SP
   around the hidden call.  ALIGN rounds sp down to a 16-byte
   boundary (AArch64 requires 16-byte sp alignment at calls). */
#define VALGRIND_ALIGN_STACK               \
      "mov x21, sp\n\t"                    \
      "bic sp, x21, #15\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the sp saved in x21. */
#define VALGRIND_RESTORE_STACK             \
      "mov sp,  x21\n\t"
4231
4232 /* These CALL_FN_ macros assume that on arm64-linux,
4233 sizeof(unsigned long) == 8. */
4234
/* Call an original (non-redirected) zero-arg function on
   arm64-linux; result (x0) is assigned to lval.  Target address is
   loaded into x8 for the branch-and-link-to-noredir sequence. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4253
/* Call an original 1-word-arg function on arm64-linux: arg1 -> x0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4274
/* Call an original 2-word-arg function on arm64-linux: args -> x0,x1. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4297
/* Call an original 3-word-arg function on arm64-linux: args -> x0-x2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4322
/* Call an original 4-word-arg function on arm64-linux: args -> x0-x3. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4349
/* Call an original 5-word-arg function on arm64-linux: args -> x0-x4. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4378
/* Call an original 6-word-arg function on arm64-linux: args -> x0-x5. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4409
/* Call an original 7-word-arg function on arm64-linux: args -> x0-x6. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4443
/* Call an original 8-word-arg function on arm64-linux: args -> x0-x7
   (all eight AAPCS64 argument registers; nothing on the stack). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4479
/* Call an original 9-word-arg function on arm64-linux: args 1-8 in
   x0-x7, arg9 stored at [sp].  0x20 bytes are reserved (more than
   the 8 needed) to keep sp 16-byte aligned.  x8 is used as scratch
   before finally receiving the target address. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4519
/* Call an original 10-word-arg function on arm64-linux: args 1-8 in
   x0-x7, args 9-10 at [sp] and [sp, #8] within a 0x20-byte,
   16-byte-aligned reservation.  x8 is scratch until it finally
   receives the target address. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"                                 \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4562
/* arm64: 11-argument variant.  Args 1-8 in x0-x7; args 9-11 spill to
   sp+0, sp+8 and sp+16 in the 0x30-byte stack frame (rounded up to
   keep sp 16-byte aligned as the AAPCS64 requires). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #0x30 \n\t" \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x4, [%1, #40] \n\t" \
         "ldr x5, [%1, #48] \n\t" \
         "ldr x6, [%1, #56] \n\t" \
         "ldr x7, [%1, #64] \n\t" \
         "ldr x8, [%1, #72] \n\t" \
         "str x8, [sp, #0] \n\t" \
         "ldr x8, [%1, #80] \n\t" \
         "str x8, [sp, #8] \n\t" \
         "ldr x8, [%1, #88] \n\t" \
         "str x8, [sp, #16] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4608
/* arm64: 12-argument variant.  Args 1-8 in x0-x7; args 9-12 spill to
   sp+0 .. sp+24 in the 0x30-byte stack frame. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11, \
                      arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #0x30 \n\t" \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x4, [%1, #40] \n\t" \
         "ldr x5, [%1, #48] \n\t" \
         "ldr x6, [%1, #56] \n\t" \
         "ldr x7, [%1, #64] \n\t" \
         "ldr x8, [%1, #72] \n\t" \
         "str x8, [sp, #0] \n\t" \
         "ldr x8, [%1, #80] \n\t" \
         "str x8, [sp, #8] \n\t" \
         "ldr x8, [%1, #88] \n\t" \
         "str x8, [sp, #16] \n\t" \
         "ldr x8, [%1, #96] \n\t" \
         "str x8, [sp, #24] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4658
4659 #endif /* PLAT_arm64_linux */
4660
4661 /* ------------------------- s390x-linux ------------------------- */
4662
4663 #if defined(PLAT_s390x_linux)
4664
4665 /* Similar workaround as amd64 (see above), but we use r11 as frame
4666 pointer and save the old r11 in r7. r11 might be used for
4667 argvec, therefore we copy argvec in r1 since r1 is clobbered
4668 after the call anyway. */
/* When the assembler emits DWARF CFI, wrap the hidden call in
   .cfi_remember_state/.cfi_restore_state and temporarily switch the
   CFA to r11 (old r11 parked in r7, see comment above) so unwinding
   through the call still works.  Without CFI support only the argvec
   copy into r1 is needed. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
/* Extra input operand: the current CFA, consumed as %2 in the
   prologue below. */
# define __FRAME_POINTER \
      ,"d"(__builtin_dwarf_cfa())
# define VALGRIND_CFI_PROLOGUE \
   ".cfi_remember_state\n\t" \
   "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
   "lgr 7,11\n\t" \
   "lgr 11,%2\n\t" \
   ".cfi_def_cfa r11, 0\n\t"
# define VALGRIND_CFI_EPILOGUE \
   "lgr 11, 7\n\t" \
   ".cfi_restore_state\n\t"
#else
# define __FRAME_POINTER
# define VALGRIND_CFI_PROLOGUE \
   "lgr 1,%1\n\t"
# define VALGRIND_CFI_EPILOGUE
#endif
4687
4688 /* Nb: On s390 the stack pointer is properly aligned *at all times*
4689 according to the s390 GCC maintainer. (The ABI specification is not
4690 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4691 VALGRIND_RESTORE_STACK are not defined here. */
4692
4693 /* These regs are trashed by the hidden call. Note that we overwrite
4694 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4695 function a proper return address. All others are ABI defined call
4696 clobbers. */
/* s390x clobber list: GPRs r0-r5 plus r14 (overwritten for the return
   address, per the comment above) and FPRs f0-f7. */
#define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
                      "f0","f1","f2","f3","f4","f5","f6","f7"
4699
4700 /* Nb: Although r11 is modified in the asm snippets below (inside
4701 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
4702 two reasons:
4703 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4704 modified
4705 (2) GCC will complain that r11 cannot appear inside a clobber section,
4706 when compiled with -O -fno-omit-frame-pointer
4707 */
4708
/* s390x: call a 0-argument word function without redirection.
   "aghi 15,-160" reserves the 160-byte register save area the callee
   may use; the target address is loaded into r1 (argvec was copied to
   r1 by VALGRIND_CFI_PROLOGUE) and the result comes back in r2. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 1, 0(1)\n\t" /* target->r1 */ \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4729
4730 /* The call abi has the arguments in r2-r6 and stack */
/* s390x: 1-argument call.  arg1 is loaded into r2 from argvec[1]
   (offset 8); the target is loaded into r1 last, since r1 holds the
   argvec pointer until then.  Result in r2. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4753
/* s390x: 2-argument call.  Args go in r2/r3; target loaded into r1
   last; result in r2. */
#define CALL_FN_W_WW(lval, orig, arg1, arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4778
/* s390x: 3-argument call.  Args in r2-r4; result in r2. */
#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4805
/* s390x: 4-argument call.  Args in r2-r5; result in r2. */
#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4834
/* s390x: 5-argument call.  Args in r2-r6 (all register args the ABI
   allows); r6 is additionally listed as clobbered since it is not in
   __CALLER_SAVED_REGS.  Result in r2. */
#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4865
/* s390x: 6-argument call.  Args 1-5 in r2-r6; arg6 is copied with mvc
   into the callee's stack-argument slot at 160(r15), hence the frame
   grows to 168 bytes (160-byte save area + one 8-byte stack arg). */
#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                     arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-168\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,168\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4899
/* s390x: 7-argument call.  Args 6-7 go to stack slots 160/168(r15);
   frame is 176 bytes. */
#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                     arg6, arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-176\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,176\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4935
/* s390x: 8-argument call.  Args 6-8 go to stack slots 160..176(r15);
   frame is 184 bytes. */
#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                     arg6, arg7 ,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-184\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,184\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4973
/* s390x: 9-argument call.  Args 6-9 go to stack slots 160..184(r15);
   frame is 192 bytes. */
#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                     arg6, arg7 ,arg8, arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-192\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "mvc 184(8,15), 72(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,192\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5013
/* s390x: 10-argument call.  Args 6-10 go to stack slots 160..192(r15);
   frame is 200 bytes. */
#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                      arg6, arg7 ,arg8, arg9, arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-200\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "mvc 184(8,15), 72(1)\n\t" \
         "mvc 192(8,15), 80(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,200\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5055
/* s390x: 11-argument call.  Args 6-11 go to stack slots 160..200(r15);
   frame is 208 bytes. */
#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                      arg6, arg7 ,arg8, arg9, arg10, arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      _argvec[11] = (unsigned long)arg11; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-208\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "mvc 184(8,15), 72(1)\n\t" \
         "mvc 192(8,15), 80(1)\n\t" \
         "mvc 200(8,15), 88(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,208\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5099
/* s390x: 12-argument call.  Args 6-12 go to stack slots 160..208(r15);
   frame is 216 bytes. */
#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                      arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      _argvec[11] = (unsigned long)arg11; \
      _argvec[12] = (unsigned long)arg12; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-216\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "mvc 184(8,15), 72(1)\n\t" \
         "mvc 192(8,15), 80(1)\n\t" \
         "mvc 200(8,15), 88(1)\n\t" \
         "mvc 208(8,15), 96(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,216\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5145
5146
5147 #endif /* PLAT_s390x_linux */
5148
5149 /* ------------------------- mips32-linux ----------------------- */
5150
5151 #if defined(PLAT_mips32_linux)
5152
5153 /* These regs are trashed by the hidden call. */
/* mips32 clobber list: $2-$15 ($v0-$t7), $24-$25 ($t8, $t9) and $31
   ($ra). */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
5157
5158 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5159 long) == 4. */
5160
/* mips32 o32: call a 0-argument word function without redirection.
   $28 (gp) and $31 (ra) are saved in an 8-byte frame around the call;
   a further 16 bytes is reserved as the o32 outgoing-argument area.
   The target goes in $25 (t9) as PIC calling requires; result in $2. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16\n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5185
/* mips32 o32: 1-argument call.  arg1 loaded into $4 ($a0) from
   _argvec[1] (byte offset 4 — unsigned long is 4 bytes here). */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $4, 4(%1) \n\t" /* arg1*/ \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5212
/* mips32 o32: 2-argument call.  Args in $4/$5 ($a0/$a1); result in $2. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5241
/* mips32 o32: 3-argument call.  Args in $4-$6; result in $2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5272
/* mips32 o32: 4-argument call.  Args in $4-$7 (all the o32 register
   args); result in $2. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5305
/* mips32 o32: 5-argument call.  Args 1-4 in $4-$7; arg5 is staged
   through $4 into the stack slot at 16($29) (the o32 fifth-arg slot
   above the 16-byte reserved area), before $4 is reloaded with arg1. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 24\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 24 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* mips32 o32: 6-argument call.  Args 5 and 6 are staged through $4
   into stack slots 16($29) and 20($29) of the 32-byte frame, then
   $4-$7 are loaded with args 1-4.  (The "nop" between the lw/sw pair
   matches the upstream source; presumably a load-delay precaution —
   TODO(review): confirm against upstream valgrind.h.) */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 32\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "nop\n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 32 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5380
/* mips32 o32: 7-argument call.  Args 5-7 staged through $4 into stack
   slots 16..24($29) of the 32-byte frame. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 32\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 32 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5423
/* mips32 o32: 8-argument call.  Args 5-8 staged through $4 into stack
   slots 16..28($29) of the 40-byte frame. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 40\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 40 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5469
/* mips32 o32: 9-argument call.  Args 5-9 staged through $4 into stack
   slots 16..32($29) of the 40-byte frame. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 40\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" \
         "lw $4, 36(%1) \n\t" \
         "sw $4, 32($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 40 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5518
/* 10-argument variant of the mips32 NOREDIR call.  Same scheme as
   CALL_FN_W_9W: args 1..4 in $4..$7, args 5..10 stored to
   16($29)..36($29) in a 48-byte outgoing area (48 rather than 44 keeps
   the stack 8-byte aligned); gp/ra saved around the call; target in
   $25 (t9); result in $2 (v0). */
5519 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5520 arg7,arg8,arg9,arg10) \
5521 do { \
5522 volatile OrigFn _orig = (orig); \
5523 volatile unsigned long _argvec[11]; \
5524 volatile unsigned long _res; \
5525 _argvec[0] = (unsigned long)_orig.nraddr; \
5526 _argvec[1] = (unsigned long)(arg1); \
5527 _argvec[2] = (unsigned long)(arg2); \
5528 _argvec[3] = (unsigned long)(arg3); \
5529 _argvec[4] = (unsigned long)(arg4); \
5530 _argvec[5] = (unsigned long)(arg5); \
5531 _argvec[6] = (unsigned long)(arg6); \
5532 _argvec[7] = (unsigned long)(arg7); \
5533 _argvec[8] = (unsigned long)(arg8); \
5534 _argvec[9] = (unsigned long)(arg9); \
5535 _argvec[10] = (unsigned long)(arg10); \
5536 __asm__ volatile( \
5537 "subu $29, $29, 8 \n\t" \
5538 "sw $28, 0($29) \n\t" \
5539 "sw $31, 4($29) \n\t" \
5540 "lw $4, 20(%1) \n\t" \
5541 "subu $29, $29, 48\n\t" \
5542 "sw $4, 16($29) \n\t" \
5543 "lw $4, 24(%1) \n\t" \
5544 "sw $4, 20($29) \n\t" \
5545 "lw $4, 28(%1) \n\t" \
5546 "sw $4, 24($29) \n\t" \
5547 "lw $4, 32(%1) \n\t" \
5548 "sw $4, 28($29) \n\t" \
5549 "lw $4, 36(%1) \n\t" \
5550 "sw $4, 32($29) \n\t" \
5551 "lw $4, 40(%1) \n\t" \
5552 "sw $4, 36($29) \n\t" \
5553 "lw $4, 4(%1) \n\t" \
5554 "lw $5, 8(%1) \n\t" \
5555 "lw $6, 12(%1) \n\t" \
5556 "lw $7, 16(%1) \n\t" \
5557 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5558 VALGRIND_CALL_NOREDIR_T9 \
5559 "addu $29, $29, 48 \n\t" \
5560 "lw $28, 0($29) \n\t" \
5561 "lw $31, 4($29) \n\t" \
5562 "addu $29, $29, 8 \n\t" \
5563 "move %0, $2\n" \
5564 : /*out*/ "=r" (_res) \
5565 : /*in*/ "0" (&_argvec[0]) \
5566 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5567 ); \
5568 lval = (__typeof__(lval)) _res; \
5569 } while (0)
5570
/* 11-argument variant of the mips32 NOREDIR call.  Args 1..4 in
   $4..$7; args 5..11 stored to 16($29)..40($29) within a 48-byte
   outgoing area; gp/ra saved around the call; target in $25 (t9);
   result in $2 (v0). */
5571 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5572 arg6,arg7,arg8,arg9,arg10, \
5573 arg11) \
5574 do { \
5575 volatile OrigFn _orig = (orig); \
5576 volatile unsigned long _argvec[12]; \
5577 volatile unsigned long _res; \
5578 _argvec[0] = (unsigned long)_orig.nraddr; \
5579 _argvec[1] = (unsigned long)(arg1); \
5580 _argvec[2] = (unsigned long)(arg2); \
5581 _argvec[3] = (unsigned long)(arg3); \
5582 _argvec[4] = (unsigned long)(arg4); \
5583 _argvec[5] = (unsigned long)(arg5); \
5584 _argvec[6] = (unsigned long)(arg6); \
5585 _argvec[7] = (unsigned long)(arg7); \
5586 _argvec[8] = (unsigned long)(arg8); \
5587 _argvec[9] = (unsigned long)(arg9); \
5588 _argvec[10] = (unsigned long)(arg10); \
5589 _argvec[11] = (unsigned long)(arg11); \
5590 __asm__ volatile( \
5591 "subu $29, $29, 8 \n\t" \
5592 "sw $28, 0($29) \n\t" \
5593 "sw $31, 4($29) \n\t" \
5594 "lw $4, 20(%1) \n\t" \
5595 "subu $29, $29, 48\n\t" \
5596 "sw $4, 16($29) \n\t" \
5597 "lw $4, 24(%1) \n\t" \
5598 "sw $4, 20($29) \n\t" \
5599 "lw $4, 28(%1) \n\t" \
5600 "sw $4, 24($29) \n\t" \
5601 "lw $4, 32(%1) \n\t" \
5602 "sw $4, 28($29) \n\t" \
5603 "lw $4, 36(%1) \n\t" \
5604 "sw $4, 32($29) \n\t" \
5605 "lw $4, 40(%1) \n\t" \
5606 "sw $4, 36($29) \n\t" \
5607 "lw $4, 44(%1) \n\t" \
5608 "sw $4, 40($29) \n\t" \
5609 "lw $4, 4(%1) \n\t" \
5610 "lw $5, 8(%1) \n\t" \
5611 "lw $6, 12(%1) \n\t" \
5612 "lw $7, 16(%1) \n\t" \
5613 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5614 VALGRIND_CALL_NOREDIR_T9 \
5615 "addu $29, $29, 48 \n\t" \
5616 "lw $28, 0($29) \n\t" \
5617 "lw $31, 4($29) \n\t" \
5618 "addu $29, $29, 8 \n\t" \
5619 "move %0, $2\n" \
5620 : /*out*/ "=r" (_res) \
5621 : /*in*/ "0" (&_argvec[0]) \
5622 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5623 ); \
5624 lval = (__typeof__(lval)) _res; \
5625 } while (0)
5626
/* 12-argument variant of the mips32 NOREDIR call.  Args 1..4 in
   $4..$7; args 5..12 stored to 16($29)..44($29) within a 56-byte
   outgoing area; gp/ra saved around the call; target in $25 (t9);
   result in $2 (v0).
   NOTE(review): this variant uses the plain "r" input constraint
   whereas the smaller variants use "0" (tying input to the output
   register).  Looks intentional-but-inconsistent upstream behavior —
   confirm against the upstream valgrind.h before changing. */
5627 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5628 arg6,arg7,arg8,arg9,arg10, \
5629 arg11,arg12) \
5630 do { \
5631 volatile OrigFn _orig = (orig); \
5632 volatile unsigned long _argvec[13]; \
5633 volatile unsigned long _res; \
5634 _argvec[0] = (unsigned long)_orig.nraddr; \
5635 _argvec[1] = (unsigned long)(arg1); \
5636 _argvec[2] = (unsigned long)(arg2); \
5637 _argvec[3] = (unsigned long)(arg3); \
5638 _argvec[4] = (unsigned long)(arg4); \
5639 _argvec[5] = (unsigned long)(arg5); \
5640 _argvec[6] = (unsigned long)(arg6); \
5641 _argvec[7] = (unsigned long)(arg7); \
5642 _argvec[8] = (unsigned long)(arg8); \
5643 _argvec[9] = (unsigned long)(arg9); \
5644 _argvec[10] = (unsigned long)(arg10); \
5645 _argvec[11] = (unsigned long)(arg11); \
5646 _argvec[12] = (unsigned long)(arg12); \
5647 __asm__ volatile( \
5648 "subu $29, $29, 8 \n\t" \
5649 "sw $28, 0($29) \n\t" \
5650 "sw $31, 4($29) \n\t" \
5651 "lw $4, 20(%1) \n\t" \
5652 "subu $29, $29, 56\n\t" \
5653 "sw $4, 16($29) \n\t" \
5654 "lw $4, 24(%1) \n\t" \
5655 "sw $4, 20($29) \n\t" \
5656 "lw $4, 28(%1) \n\t" \
5657 "sw $4, 24($29) \n\t" \
5658 "lw $4, 32(%1) \n\t" \
5659 "sw $4, 28($29) \n\t" \
5660 "lw $4, 36(%1) \n\t" \
5661 "sw $4, 32($29) \n\t" \
5662 "lw $4, 40(%1) \n\t" \
5663 "sw $4, 36($29) \n\t" \
5664 "lw $4, 44(%1) \n\t" \
5665 "sw $4, 40($29) \n\t" \
5666 "lw $4, 48(%1) \n\t" \
5667 "sw $4, 44($29) \n\t" \
5668 "lw $4, 4(%1) \n\t" \
5669 "lw $5, 8(%1) \n\t" \
5670 "lw $6, 12(%1) \n\t" \
5671 "lw $7, 16(%1) \n\t" \
5672 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5673 VALGRIND_CALL_NOREDIR_T9 \
5674 "addu $29, $29, 56 \n\t" \
5675 "lw $28, 0($29) \n\t" \
5676 "lw $31, 4($29) \n\t" \
5677 "addu $29, $29, 8 \n\t" \
5678 "move %0, $2\n" \
5679 : /*out*/ "=r" (_res) \
5680 : /*in*/ "r" (&_argvec[0]) \
5681 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5682 ); \
5683 lval = (__typeof__(lval)) _res; \
5684 } while (0)
5685
5686 #endif /* PLAT_mips32_linux */
5687
5688 /* ------------------------- mips64-linux ------------------------- */
5689
5690 #if defined(PLAT_mips64_linux)
5691
5692 /* These regs are trashed by the hidden call. */
/* Clobber list for the mips64 CALL_FN_ macros below: the integer
   registers the hidden call may overwrite ($2..$15, $24, $25, $31 —
   presumably v0/v1, the argument and temporary registers, t8/t9 and
   ra; confirm against the MIPS ABI register map). */
5693 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5694 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
5695 "$25", "$31"
5696
5697 /* These CALL_FN_ macros assume that on mips64-linux, sizeof(unsigned
5698    long) == 8. */
5699
/* mips64: call a zero-argument word function with no redirection.
   Only the target address needs loading: _argvec[0] -> $25 (t9), call
   via VALGRIND_CALL_NOREDIR_T9, result read from $2 (v0).  The "0"
   constraint ties the argvec pointer to the output register. */
5700 #define CALL_FN_W_v(lval, orig) \
5701 do { \
5702 volatile OrigFn _orig = (orig); \
5703 volatile unsigned long _argvec[1]; \
5704 volatile unsigned long _res; \
5705 _argvec[0] = (unsigned long)_orig.nraddr; \
5706 __asm__ volatile( \
5707 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5708 VALGRIND_CALL_NOREDIR_T9 \
5709 "move %0, $2\n" \
5710 : /*out*/ "=r" (_res) \
5711 : /*in*/ "0" (&_argvec[0]) \
5712 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5713 ); \
5714 lval = (__typeof__(lval)) _res; \
5715 } while (0)
5716
/* mips64: one word argument, passed in $4 (a0).  Target loaded into
   $25 (t9); result in $2 (v0).  Slots in _argvec are 8 bytes wide,
   hence the 8(%1) offset. */
5717 #define CALL_FN_W_W(lval, orig, arg1) \
5718 do { \
5719 volatile OrigFn _orig = (orig); \
5720 volatile unsigned long _argvec[2]; \
5721 volatile unsigned long _res; \
5722 _argvec[0] = (unsigned long)_orig.nraddr; \
5723 _argvec[1] = (unsigned long)(arg1); \
5724 __asm__ volatile( \
5725 "ld $4, 8(%1)\n\t" /* arg1*/ \
5726 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5727 VALGRIND_CALL_NOREDIR_T9 \
5728 "move %0, $2\n" \
5729 : /*out*/ "=r" (_res) \
5730 : /*in*/ "r" (&_argvec[0]) \
5731 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5732 ); \
5733 lval = (__typeof__(lval)) _res; \
5734 } while (0)
5735
/* mips64: two word arguments in $4/$5 (a0/a1); otherwise as
   CALL_FN_W_W. */
5736 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5737 do { \
5738 volatile OrigFn _orig = (orig); \
5739 volatile unsigned long _argvec[3]; \
5740 volatile unsigned long _res; \
5741 _argvec[0] = (unsigned long)_orig.nraddr; \
5742 _argvec[1] = (unsigned long)(arg1); \
5743 _argvec[2] = (unsigned long)(arg2); \
5744 __asm__ volatile( \
5745 "ld $4, 8(%1)\n\t" \
5746 "ld $5, 16(%1)\n\t" \
5747 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5748 VALGRIND_CALL_NOREDIR_T9 \
5749 "move %0, $2\n" \
5750 : /*out*/ "=r" (_res) \
5751 : /*in*/ "r" (&_argvec[0]) \
5752 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5753 ); \
5754 lval = (__typeof__(lval)) _res; \
5755 } while (0)
5756
/* mips64: three word arguments in $4..$6; otherwise as CALL_FN_W_W. */
5757 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5758 do { \
5759 volatile OrigFn _orig = (orig); \
5760 volatile unsigned long _argvec[4]; \
5761 volatile unsigned long _res; \
5762 _argvec[0] = (unsigned long)_orig.nraddr; \
5763 _argvec[1] = (unsigned long)(arg1); \
5764 _argvec[2] = (unsigned long)(arg2); \
5765 _argvec[3] = (unsigned long)(arg3); \
5766 __asm__ volatile( \
5767 "ld $4, 8(%1)\n\t" \
5768 "ld $5, 16(%1)\n\t" \
5769 "ld $6, 24(%1)\n\t" \
5770 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5771 VALGRIND_CALL_NOREDIR_T9 \
5772 "move %0, $2\n" \
5773 : /*out*/ "=r" (_res) \
5774 : /*in*/ "r" (&_argvec[0]) \
5775 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5776 ); \
5777 lval = (__typeof__(lval)) _res; \
5778 } while (0)
5779
/* mips64: four word arguments in $4..$7; otherwise as CALL_FN_W_W. */
5780 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5781 do { \
5782 volatile OrigFn _orig = (orig); \
5783 volatile unsigned long _argvec[5]; \
5784 volatile unsigned long _res; \
5785 _argvec[0] = (unsigned long)_orig.nraddr; \
5786 _argvec[1] = (unsigned long)(arg1); \
5787 _argvec[2] = (unsigned long)(arg2); \
5788 _argvec[3] = (unsigned long)(arg3); \
5789 _argvec[4] = (unsigned long)(arg4); \
5790 __asm__ volatile( \
5791 "ld $4, 8(%1)\n\t" \
5792 "ld $5, 16(%1)\n\t" \
5793 "ld $6, 24(%1)\n\t" \
5794 "ld $7, 32(%1)\n\t" \
5795 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5796 VALGRIND_CALL_NOREDIR_T9 \
5797 "move %0, $2\n" \
5798 : /*out*/ "=r" (_res) \
5799 : /*in*/ "r" (&_argvec[0]) \
5800 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5801 ); \
5802 lval = (__typeof__(lval)) _res; \
5803 } while (0)
5804
/* mips64: five word arguments in $4..$8 (register-only; presumably
   the n64 convention, which passes the first eight args in
   registers). */
5805 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5806 do { \
5807 volatile OrigFn _orig = (orig); \
5808 volatile unsigned long _argvec[6]; \
5809 volatile unsigned long _res; \
5810 _argvec[0] = (unsigned long)_orig.nraddr; \
5811 _argvec[1] = (unsigned long)(arg1); \
5812 _argvec[2] = (unsigned long)(arg2); \
5813 _argvec[3] = (unsigned long)(arg3); \
5814 _argvec[4] = (unsigned long)(arg4); \
5815 _argvec[5] = (unsigned long)(arg5); \
5816 __asm__ volatile( \
5817 "ld $4, 8(%1)\n\t" \
5818 "ld $5, 16(%1)\n\t" \
5819 "ld $6, 24(%1)\n\t" \
5820 "ld $7, 32(%1)\n\t" \
5821 "ld $8, 40(%1)\n\t" \
5822 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5823 VALGRIND_CALL_NOREDIR_T9 \
5824 "move %0, $2\n" \
5825 : /*out*/ "=r" (_res) \
5826 : /*in*/ "r" (&_argvec[0]) \
5827 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5828 ); \
5829 lval = (__typeof__(lval)) _res; \
5830 } while (0)
5831
/* mips64: six word arguments in $4..$9 (register-only). */
5832 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5833 do { \
5834 volatile OrigFn _orig = (orig); \
5835 volatile unsigned long _argvec[7]; \
5836 volatile unsigned long _res; \
5837 _argvec[0] = (unsigned long)_orig.nraddr; \
5838 _argvec[1] = (unsigned long)(arg1); \
5839 _argvec[2] = (unsigned long)(arg2); \
5840 _argvec[3] = (unsigned long)(arg3); \
5841 _argvec[4] = (unsigned long)(arg4); \
5842 _argvec[5] = (unsigned long)(arg5); \
5843 _argvec[6] = (unsigned long)(arg6); \
5844 __asm__ volatile( \
5845 "ld $4, 8(%1)\n\t" \
5846 "ld $5, 16(%1)\n\t" \
5847 "ld $6, 24(%1)\n\t" \
5848 "ld $7, 32(%1)\n\t" \
5849 "ld $8, 40(%1)\n\t" \
5850 "ld $9, 48(%1)\n\t" \
5851 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5852 VALGRIND_CALL_NOREDIR_T9 \
5853 "move %0, $2\n" \
5854 : /*out*/ "=r" (_res) \
5855 : /*in*/ "r" (&_argvec[0]) \
5856 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5857 ); \
5858 lval = (__typeof__(lval)) _res; \
5859 } while (0)
5860
/* mips64: seven word arguments in $4..$10 (register-only). */
5861 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5862 arg7) \
5863 do { \
5864 volatile OrigFn _orig = (orig); \
5865 volatile unsigned long _argvec[8]; \
5866 volatile unsigned long _res; \
5867 _argvec[0] = (unsigned long)_orig.nraddr; \
5868 _argvec[1] = (unsigned long)(arg1); \
5869 _argvec[2] = (unsigned long)(arg2); \
5870 _argvec[3] = (unsigned long)(arg3); \
5871 _argvec[4] = (unsigned long)(arg4); \
5872 _argvec[5] = (unsigned long)(arg5); \
5873 _argvec[6] = (unsigned long)(arg6); \
5874 _argvec[7] = (unsigned long)(arg7); \
5875 __asm__ volatile( \
5876 "ld $4, 8(%1)\n\t" \
5877 "ld $5, 16(%1)\n\t" \
5878 "ld $6, 24(%1)\n\t" \
5879 "ld $7, 32(%1)\n\t" \
5880 "ld $8, 40(%1)\n\t" \
5881 "ld $9, 48(%1)\n\t" \
5882 "ld $10, 56(%1)\n\t" \
5883 "ld $25, 0(%1) \n\t" /* target->t9 */ \
5884 VALGRIND_CALL_NOREDIR_T9 \
5885 "move %0, $2\n" \
5886 : /*out*/ "=r" (_res) \
5887 : /*in*/ "r" (&_argvec[0]) \
5888 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5889 ); \
5890 lval = (__typeof__(lval)) _res; \
5891 } while (0)
5892
/* mips64: eight word arguments in $4..$11 — the last fully
   register-passed case; 9+ args spill to the stack (see below). */
5893 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5894 arg7,arg8) \
5895 do { \
5896 volatile OrigFn _orig = (orig); \
5897 volatile unsigned long _argvec[9]; \
5898 volatile unsigned long _res; \
5899 _argvec[0] = (unsigned long)_orig.nraddr; \
5900 _argvec[1] = (unsigned long)(arg1); \
5901 _argvec[2] = (unsigned long)(arg2); \
5902 _argvec[3] = (unsigned long)(arg3); \
5903 _argvec[4] = (unsigned long)(arg4); \
5904 _argvec[5] = (unsigned long)(arg5); \
5905 _argvec[6] = (unsigned long)(arg6); \
5906 _argvec[7] = (unsigned long)(arg7); \
5907 _argvec[8] = (unsigned long)(arg8); \
5908 __asm__ volatile( \
5909 "ld $4, 8(%1)\n\t" \
5910 "ld $5, 16(%1)\n\t" \
5911 "ld $6, 24(%1)\n\t" \
5912 "ld $7, 32(%1)\n\t" \
5913 "ld $8, 40(%1)\n\t" \
5914 "ld $9, 48(%1)\n\t" \
5915 "ld $10, 56(%1)\n\t" \
5916 "ld $11, 64(%1)\n\t" \
5917 "ld $25, 0(%1) \n\t" /* target->t9 */ \
5918 VALGRIND_CALL_NOREDIR_T9 \
5919 "move %0, $2\n" \
5920 : /*out*/ "=r" (_res) \
5921 : /*in*/ "r" (&_argvec[0]) \
5922 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5923 ); \
5924 lval = (__typeof__(lval)) _res; \
5925 } while (0)
5926
/* mips64: nine word arguments — args 1..8 in $4..$11, arg9 stored in
   an 8-byte stack slot at 0($29) carved out with dsubu and released
   with daddu after the call. */
5927 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5928 arg7,arg8,arg9) \
5929 do { \
5930 volatile OrigFn _orig = (orig); \
5931 volatile unsigned long _argvec[10]; \
5932 volatile unsigned long _res; \
5933 _argvec[0] = (unsigned long)_orig.nraddr; \
5934 _argvec[1] = (unsigned long)(arg1); \
5935 _argvec[2] = (unsigned long)(arg2); \
5936 _argvec[3] = (unsigned long)(arg3); \
5937 _argvec[4] = (unsigned long)(arg4); \
5938 _argvec[5] = (unsigned long)(arg5); \
5939 _argvec[6] = (unsigned long)(arg6); \
5940 _argvec[7] = (unsigned long)(arg7); \
5941 _argvec[8] = (unsigned long)(arg8); \
5942 _argvec[9] = (unsigned long)(arg9); \
5943 __asm__ volatile( \
5944 "dsubu $29, $29, 8\n\t" \
5945 "ld $4, 72(%1)\n\t" \
5946 "sd $4, 0($29)\n\t" \
5947 "ld $4, 8(%1)\n\t" \
5948 "ld $5, 16(%1)\n\t" \
5949 "ld $6, 24(%1)\n\t" \
5950 "ld $7, 32(%1)\n\t" \
5951 "ld $8, 40(%1)\n\t" \
5952 "ld $9, 48(%1)\n\t" \
5953 "ld $10, 56(%1)\n\t" \
5954 "ld $11, 64(%1)\n\t" \
5955 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5956 VALGRIND_CALL_NOREDIR_T9 \
5957 "daddu $29, $29, 8\n\t" \
5958 "move %0, $2\n" \
5959 : /*out*/ "=r" (_res) \
5960 : /*in*/ "r" (&_argvec[0]) \
5961 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5962 ); \
5963 lval = (__typeof__(lval)) _res; \
5964 } while (0)
5965
/* mips64: ten word arguments — args 1..8 in $4..$11, args 9 and 10 in
   a 16-byte stack area at 0($29)/8($29). */
5966 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5967 arg7,arg8,arg9,arg10) \
5968 do { \
5969 volatile OrigFn _orig = (orig); \
5970 volatile unsigned long _argvec[11]; \
5971 volatile unsigned long _res; \
5972 _argvec[0] = (unsigned long)_orig.nraddr; \
5973 _argvec[1] = (unsigned long)(arg1); \
5974 _argvec[2] = (unsigned long)(arg2); \
5975 _argvec[3] = (unsigned long)(arg3); \
5976 _argvec[4] = (unsigned long)(arg4); \
5977 _argvec[5] = (unsigned long)(arg5); \
5978 _argvec[6] = (unsigned long)(arg6); \
5979 _argvec[7] = (unsigned long)(arg7); \
5980 _argvec[8] = (unsigned long)(arg8); \
5981 _argvec[9] = (unsigned long)(arg9); \
5982 _argvec[10] = (unsigned long)(arg10); \
5983 __asm__ volatile( \
5984 "dsubu $29, $29, 16\n\t" \
5985 "ld $4, 72(%1)\n\t" \
5986 "sd $4, 0($29)\n\t" \
5987 "ld $4, 80(%1)\n\t" \
5988 "sd $4, 8($29)\n\t" \
5989 "ld $4, 8(%1)\n\t" \
5990 "ld $5, 16(%1)\n\t" \
5991 "ld $6, 24(%1)\n\t" \
5992 "ld $7, 32(%1)\n\t" \
5993 "ld $8, 40(%1)\n\t" \
5994 "ld $9, 48(%1)\n\t" \
5995 "ld $10, 56(%1)\n\t" \
5996 "ld $11, 64(%1)\n\t" \
5997 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5998 VALGRIND_CALL_NOREDIR_T9 \
5999 "daddu $29, $29, 16\n\t" \
6000 "move %0, $2\n" \
6001 : /*out*/ "=r" (_res) \
6002 : /*in*/ "r" (&_argvec[0]) \
6003 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6004 ); \
6005 lval = (__typeof__(lval)) _res; \
6006 } while (0)
6007
/* mips64: eleven word arguments — args 1..8 in $4..$11, args 9..11 in
   a 24-byte stack area at 0/8/16($29). */
6008 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6009 arg6,arg7,arg8,arg9,arg10, \
6010 arg11) \
6011 do { \
6012 volatile OrigFn _orig = (orig); \
6013 volatile unsigned long _argvec[12]; \
6014 volatile unsigned long _res; \
6015 _argvec[0] = (unsigned long)_orig.nraddr; \
6016 _argvec[1] = (unsigned long)(arg1); \
6017 _argvec[2] = (unsigned long)(arg2); \
6018 _argvec[3] = (unsigned long)(arg3); \
6019 _argvec[4] = (unsigned long)(arg4); \
6020 _argvec[5] = (unsigned long)(arg5); \
6021 _argvec[6] = (unsigned long)(arg6); \
6022 _argvec[7] = (unsigned long)(arg7); \
6023 _argvec[8] = (unsigned long)(arg8); \
6024 _argvec[9] = (unsigned long)(arg9); \
6025 _argvec[10] = (unsigned long)(arg10); \
6026 _argvec[11] = (unsigned long)(arg11); \
6027 __asm__ volatile( \
6028 "dsubu $29, $29, 24\n\t" \
6029 "ld $4, 72(%1)\n\t" \
6030 "sd $4, 0($29)\n\t" \
6031 "ld $4, 80(%1)\n\t" \
6032 "sd $4, 8($29)\n\t" \
6033 "ld $4, 88(%1)\n\t" \
6034 "sd $4, 16($29)\n\t" \
6035 "ld $4, 8(%1)\n\t" \
6036 "ld $5, 16(%1)\n\t" \
6037 "ld $6, 24(%1)\n\t" \
6038 "ld $7, 32(%1)\n\t" \
6039 "ld $8, 40(%1)\n\t" \
6040 "ld $9, 48(%1)\n\t" \
6041 "ld $10, 56(%1)\n\t" \
6042 "ld $11, 64(%1)\n\t" \
6043 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6044 VALGRIND_CALL_NOREDIR_T9 \
6045 "daddu $29, $29, 24\n\t" \
6046 "move %0, $2\n" \
6047 : /*out*/ "=r" (_res) \
6048 : /*in*/ "r" (&_argvec[0]) \
6049 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6050 ); \
6051 lval = (__typeof__(lval)) _res; \
6052 } while (0)
6053
/* mips64: twelve word arguments — args 1..8 in $4..$11, args 9..12 in
   a 32-byte stack area at 0/8/16/24($29). */
6054 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6055 arg6,arg7,arg8,arg9,arg10, \
6056 arg11,arg12) \
6057 do { \
6058 volatile OrigFn _orig = (orig); \
6059 volatile unsigned long _argvec[13]; \
6060 volatile unsigned long _res; \
6061 _argvec[0] = (unsigned long)_orig.nraddr; \
6062 _argvec[1] = (unsigned long)(arg1); \
6063 _argvec[2] = (unsigned long)(arg2); \
6064 _argvec[3] = (unsigned long)(arg3); \
6065 _argvec[4] = (unsigned long)(arg4); \
6066 _argvec[5] = (unsigned long)(arg5); \
6067 _argvec[6] = (unsigned long)(arg6); \
6068 _argvec[7] = (unsigned long)(arg7); \
6069 _argvec[8] = (unsigned long)(arg8); \
6070 _argvec[9] = (unsigned long)(arg9); \
6071 _argvec[10] = (unsigned long)(arg10); \
6072 _argvec[11] = (unsigned long)(arg11); \
6073 _argvec[12] = (unsigned long)(arg12); \
6074 __asm__ volatile( \
6075 "dsubu $29, $29, 32\n\t" \
6076 "ld $4, 72(%1)\n\t" \
6077 "sd $4, 0($29)\n\t" \
6078 "ld $4, 80(%1)\n\t" \
6079 "sd $4, 8($29)\n\t" \
6080 "ld $4, 88(%1)\n\t" \
6081 "sd $4, 16($29)\n\t" \
6082 "ld $4, 96(%1)\n\t" \
6083 "sd $4, 24($29)\n\t" \
6084 "ld $4, 8(%1)\n\t" \
6085 "ld $5, 16(%1)\n\t" \
6086 "ld $6, 24(%1)\n\t" \
6087 "ld $7, 32(%1)\n\t" \
6088 "ld $8, 40(%1)\n\t" \
6089 "ld $9, 48(%1)\n\t" \
6090 "ld $10, 56(%1)\n\t" \
6091 "ld $11, 64(%1)\n\t" \
6092 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6093 VALGRIND_CALL_NOREDIR_T9 \
6094 "daddu $29, $29, 32\n\t" \
6095 "move %0, $2\n" \
6096 : /*out*/ "=r" (_res) \
6097 : /*in*/ "r" (&_argvec[0]) \
6098 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6099 ); \
6100 lval = (__typeof__(lval)) _res; \
6101 } while (0)
6102
6103 #endif /* PLAT_mips64_linux */
6104
6105
6106 /* ------------------------------------------------------------------ */
6107 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
6108 /* */
6109 /* ------------------------------------------------------------------ */
6110
6111 /* Some request codes. There are many more of these, but most are not
6112 exposed to end-user view. These are the public ones, all of the
6113 form 0x1000 + small_number.
6114
6115 Core ones are in the range 0x00000000--0x0000ffff. The non-public
6116 ones start at 0x2000.
6117 */
6118
6119 /* These macros are used by tools -- they must be public, but don't
6120 embed them into other programs. */
/* VG_USERREQ_TOOL_BASE packs two identifying characters into the top
   16 bits of a request code ('a' -> bits 31..24, 'b' -> bits 23..16).
   VG_IS_TOOL_USERREQ tests whether request code v carries that tool
   prefix by comparing only those top 16 bits. */
6121 #define VG_USERREQ_TOOL_BASE(a,b) \
6122 ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
6123 #define VG_IS_TOOL_USERREQ(a, b, v) \
6124 (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
6125
6126 /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
6127 This enum comprises an ABI exported by Valgrind to programs
6128 which use client requests. DO NOT CHANGE THE ORDER OF THESE
6129 ENTRIES, NOR DELETE ANY -- add new ones at the end. */
/* Client-request codes, grouped by hundreds within 0x1000+: 0x11xx
   client calls, 0x12xx error counting / gdb monitor, 0x13xx heap and
   mempool annotation, 0x14xx printf, 0x15xx stack registration, etc.
   Numeric values are the ABI; positions in the enum are cosmetic
   (e.g. RESIZEINPLACE_BLOCK = 0x130b sits beside MALLOCLIKE_BLOCK for
   readability but was presumably allocated after MEMPOOL_EXISTS
   (0x130a)). */
6130 typedef
6131 enum { VG_USERREQ__RUNNING_ON_VALGRIND = 0x1001,
6132 VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,
6133
6134 /* These allow any function to be called from the simulated
6135 CPU but run on the real CPU. Nb: the first arg passed to
6136 the function is always the ThreadId of the running
6137 thread! So CLIENT_CALL0 actually requires a 1 arg
6138 function, etc. */
6139 VG_USERREQ__CLIENT_CALL0 = 0x1101,
6140 VG_USERREQ__CLIENT_CALL1 = 0x1102,
6141 VG_USERREQ__CLIENT_CALL2 = 0x1103,
6142 VG_USERREQ__CLIENT_CALL3 = 0x1104,
6143
6144 /* Can be useful in regression testing suites -- eg. can
6145 send Valgrind's output to /dev/null and still count
6146 errors. */
6147 VG_USERREQ__COUNT_ERRORS = 0x1201,
6148
6149 /* Allows the client program and/or gdbserver to execute a monitor
6150 command. */
6151 VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,
6152
6153 /* These are useful and can be interpreted by any tool that
6154 tracks malloc() et al, by using vg_replace_malloc.c. */
6155 VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
6156 VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
6157 VG_USERREQ__FREELIKE_BLOCK = 0x1302,
6158 /* Memory pool support. */
6159 VG_USERREQ__CREATE_MEMPOOL = 0x1303,
6160 VG_USERREQ__DESTROY_MEMPOOL = 0x1304,
6161 VG_USERREQ__MEMPOOL_ALLOC = 0x1305,
6162 VG_USERREQ__MEMPOOL_FREE = 0x1306,
6163 VG_USERREQ__MEMPOOL_TRIM = 0x1307,
6164 VG_USERREQ__MOVE_MEMPOOL = 0x1308,
6165 VG_USERREQ__MEMPOOL_CHANGE = 0x1309,
6166 VG_USERREQ__MEMPOOL_EXISTS = 0x130a,
6167
6168 /* Allow printfs to valgrind log. */
6169 /* The first two pass the va_list argument by value, which
6170 assumes it is the same size as or smaller than a UWord,
6171 which generally isn't the case. Hence are deprecated.
6172 The second two pass the vargs by reference and so are
6173 immune to this problem. */
6174 /* both :: char* fmt, va_list vargs (DEPRECATED) */
6175 VG_USERREQ__PRINTF = 0x1401,
6176 VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
6177 /* both :: char* fmt, va_list* vargs */
6178 VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
6179 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,
6180
6181 /* Stack support. */
6182 VG_USERREQ__STACK_REGISTER = 0x1501,
6183 VG_USERREQ__STACK_DEREGISTER = 0x1502,
6184 VG_USERREQ__STACK_CHANGE = 0x1503,
6185
6186 /* Wine support */
6187 VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,
6188
6189 /* Querying of debug info. */
6190 VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,
6191
6192 /* Disable/enable error reporting level. Takes a single
6193 Word arg which is the delta to this thread's error
6194 disablement indicator. Hence 1 disables or further
6195 disables errors, and -1 moves back towards enablement.
6196 Other values are not allowed. */
6197 VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,
6198
6199 /* Initialise IR injection */
6200 VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901
6201 } Vg_ClientRequest;
6202
/* Non-GNU compilers do not know the GNU __extension__ keyword, so
   define it away to keep the expressions below compilable. */
6203 #if !defined(__GNUC__)
6204 # define __extension__ /* */
6205 #endif
6206
6207
6208 /* Returns the number of Valgrinds this code is running under. That
6209 is, 0 if running natively, 1 if running under Valgrind, 2 if
6210 running under Valgrind which is running under another Valgrind,
6211 etc. */
/* The default value 0 is returned when the client request is not
   intercepted (running natively, or built with NVALGRIND). */
6212 #define RUNNING_ON_VALGRIND \
6213 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */, \
6214 VG_USERREQ__RUNNING_ON_VALGRIND, \
6215 0, 0, 0, 0, 0) \
6216
6217
6218 /* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
6219 _qzz_len - 1]. Useful if you are debugging a JITter or some such,
6220 since it provides a way to make sure valgrind will retranslate the
6221 invalidated area. Returns no value. */
/* Statement form: no result; the request passes the address/length
   pair and zero-fills the remaining argument slots. */
6222 #define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
6223 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
6224 _qzz_addr, _qzz_len, 0, 0, 0)
6225
6226
6227 /* These requests are for getting Valgrind itself to print something.
6228 Possibly with a backtrace. This is a really ugly hack. The return value
6229 is the number of characters printed, excluding the "**<pid>** " part at the
6230 start and the backtrace (if present). */
6231
/* Print a formatted message to the Valgrind log via the
   PRINTF_VALIST_BY_REF client request (va_list passed by reference —
   see the enum comments above).  Returns the number of characters
   printed, or 0 when built with NVALGRIND or when not running under
   Valgrind (the request's default result is 0).
   NOTE(review): in the #if below, && binds tighter than ||, so the
   condition is __GNUC__ || (__INTEL_COMPILER && !_MSC_VER); confirm
   against upstream valgrind.h before regrouping. */
6232 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6233 /* Modern GCC will optimize the static routine out if unused,
6234    and unused attribute will shut down warnings about it. */
6235 static int VALGRIND_PRINTF(const char *format, ...)
6236    __attribute__((format(__printf__, 1, 2), __unused__));
6237 #endif
6238 static int
6239 #if defined(_MSC_VER)
6240 __inline
6241 #endif
6242 VALGRIND_PRINTF(const char *format, ...)
6243 {
6244 #if defined(NVALGRIND)
6245 return 0;
6246 #else /* NVALGRIND */
6247 #if defined(_MSC_VER) || defined(__MINGW64__)
6248 uintptr_t _qzz_res;
6249 #else
6250 unsigned long _qzz_res;
6251 #endif
6252 va_list vargs;
6253 va_start(vargs, format);
6254 #if defined(_MSC_VER) || defined(__MINGW64__)
6255 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6256 VG_USERREQ__PRINTF_VALIST_BY_REF,
6257 (uintptr_t)format,
6258 (uintptr_t)&vargs,
6259 0, 0, 0);
6260 #else
6261 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6262 VG_USERREQ__PRINTF_VALIST_BY_REF,
6263 (unsigned long)format,
6264 (unsigned long)&vargs,
6265 0, 0, 0);
6266 #endif
6267 va_end(vargs);
6268 return (int)_qzz_res;
6269 #endif /* NVALGRIND */
6270 }
6271
/* Same as VALGRIND_PRINTF but uses the PRINTF_BACKTRACE_VALIST_BY_REF
   request, so Valgrind appends a stack backtrace to the message.
   Returns characters printed (excluding the backtrace), or 0 under
   NVALGRIND / when not running under Valgrind. */
6272 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6273 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6274    __attribute__((format(__printf__, 1, 2), __unused__));
6275 #endif
6276 static int
6277 #if defined(_MSC_VER)
6278 __inline
6279 #endif
6280 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6281 {
6282 #if defined(NVALGRIND)
6283 return 0;
6284 #else /* NVALGRIND */
6285 #if defined(_MSC_VER) || defined(__MINGW64__)
6286 uintptr_t _qzz_res;
6287 #else
6288 unsigned long _qzz_res;
6289 #endif
6290 va_list vargs;
6291 va_start(vargs, format);
6292 #if defined(_MSC_VER) || defined(__MINGW64__)
6293 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6294 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6295 (uintptr_t)format,
6296 (uintptr_t)&vargs,
6297 0, 0, 0);
6298 #else
6299 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6300 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6301 (unsigned long)format,
6302 (unsigned long)&vargs,
6303 0, 0, 0);
6304 #endif
6305 va_end(vargs);
6306 return (int)_qzz_res;
6307 #endif /* NVALGRIND */
6308 }
6309
6310
6311 /* These requests allow control to move from the simulated CPU to the
6312 real CPU, calling an arbitary function.
6313
6314 Note that the current ThreadId is inserted as the first argument.
6315 So this call:
6316
6317 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
6318
6319 requires f to have this signature:
6320
6321 Word f(Word tid, Word arg1, Word arg2)
6322
6323 where "Word" is a word-sized type.
6324
6325 Note that these client requests are not entirely reliable. For example,
6326 if you call a function with them that subsequently calls printf(),
6327 there's a high chance Valgrind will crash. Generally, your prospects of
6328 these working are made higher if the called function does not refer to
6329 any global variables, and does not refer to any libc or other functions
6330 (printf et al). Any kind of entanglement with libc or dynamic linking is
6331 likely to have a bad outcome, for tricky reasons which we've grappled
6332 with a lot in the past.
6333 */
/* CALLn passes n user arguments plus the function pointer; Valgrind
   inserts the running ThreadId as the callee's first argument (see the
   comment block above), so the callee takes n+1 args.  Default result
   is 0 when not running under Valgrind. */
6334 #define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
6335 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6336 VG_USERREQ__CLIENT_CALL0, \
6337 _qyy_fn, \
6338 0, 0, 0, 0)
6339
6340 #define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
6341 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6342 VG_USERREQ__CLIENT_CALL1, \
6343 _qyy_fn, \
6344 _qyy_arg1, 0, 0, 0)
6345
6346 #define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
6347 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6348 VG_USERREQ__CLIENT_CALL2, \
6349 _qyy_fn, \
6350 _qyy_arg1, _qyy_arg2, 0, 0)
6351
6352 #define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
6353 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6354 VG_USERREQ__CLIENT_CALL3, \
6355 _qyy_fn, \
6356 _qyy_arg1, _qyy_arg2, \
6357 _qyy_arg3, 0)
6358
6359
6360 /* Counts the number of errors that have been recorded by a tool. Nb:
6361 the tool must record the errors with VG_(maybe_record_error)() or
6362 VG_(unique_error)() for them to be counted. */
/* Evaluates to the tool's recorded error count; 0 when not running
   under Valgrind (the request's default result). */
6363 #define VALGRIND_COUNT_ERRORS \
6364 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \
6365 0 /* default return */, \
6366 VG_USERREQ__COUNT_ERRORS, \
6367 0, 0, 0, 0, 0)
6368
6369 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
6370 when heap blocks are allocated in order to give accurate results. This
6371 happens automatically for the standard allocator functions such as
6372 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
6373 delete[], etc.
6374
6375 But if your program uses a custom allocator, this doesn't automatically
6376 happen, and Valgrind will not do as well. For example, if you allocate
6377 superblocks with mmap() and then allocates chunks of the superblocks, all
6378 Valgrind's observations will be at the mmap() level and it won't know that
6379 the chunks should be considered separate entities. In Memcheck's case,
6380 that means you probably won't get heap block overrun detection (because
6381 there won't be redzones marked as unaddressable) and you definitely won't
6382 get any leak detection.
6383
6384 The following client requests allow a custom allocator to be annotated so
6385 that it can be handled accurately by Valgrind.
6386
6387 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
6388 by a malloc()-like function. For Memcheck (an illustrative case), this
6389 does two things:
6390
6391 - It records that the block has been allocated. This means any addresses
6392 within the block mentioned in error messages will be
6393 identified as belonging to the block. It also means that if the block
6394 isn't freed it will be detected by the leak checker.
6395
6396 - It marks the block as being addressable and undefined (if 'is_zeroed' is
6397 not set), or addressable and defined (if 'is_zeroed' is set). This
6398 controls how accesses to the block by the program are handled.
6399
6400 'addr' is the start of the usable block (ie. after any
6401 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
6402 can apply redzones -- these are blocks of padding at the start and end of
6403 each block. Adding redzones is recommended as it makes it much more likely
6404 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
6405 zeroed (or filled with another predictable value), as is the case for
6406 calloc().
6407
6408 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
6409 heap block -- that will be used by the client program -- is allocated.
6410 It's best to put it at the outermost level of the allocator if possible;
6411 for example, if you have a function my_alloc() which calls
6412 internal_alloc(), and the client request is put inside internal_alloc(),
6413 stack traces relating to the heap block will contain entries for both
6414 my_alloc() and internal_alloc(), which is probably not what you want.
6415
6416 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
6417 custom blocks from within a heap block, B, that has been allocated with
6418 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
6419 -- the custom blocks will take precedence.
6420
6421 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
6422 Memcheck, it does two things:
6423
6424 - It records that the block has been deallocated. This assumes that the
6425 block was annotated as having been allocated via
6426 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6427
6428 - It marks the block as being unaddressable.
6429
6430 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
6431 heap block is deallocated.
6432
6433 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
6434 Memcheck, it does four things:
6435
6436 - It records that the size of a block has been changed. This assumes that
6437 the block was annotated as having been allocated via
6438 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6439
6440 - If the block shrunk, it marks the freed memory as being unaddressable.
6441
6442 - If the block grew, it marks the new area as undefined and defines a red
6443 zone past the end of the new block.
6444
6445 - The V-bits of the overlap between the old and the new block are preserved.
6446
6447 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
6448 and before deallocation of the old block.
6449
6450 In many cases, these three client requests will not be enough to get your
6451 allocator working well with Memcheck. More specifically, if your allocator
6452 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
6453 will be necessary to mark the memory as addressable just before the zeroing
6454 occurs, otherwise you'll get a lot of invalid write errors. For example,
6455 you'll need to do this if your allocator recycles freed blocks, but it
6456 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
6457 Alternatively, if your allocator reuses freed blocks for allocator-internal
6458 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
6459
6460 Really, what's happening is a blurring of the lines between the client
6461 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
6462 memory should be considered unaddressable to the client program, but the
6463 allocator knows more than the rest of the client program and so may be able
6464 to safely access it. Extra client requests are necessary for Valgrind to
6465 understand the distinction between the allocator and the rest of the
6466 program.
6467
6468 Ignored if addr == 0.
6469 */
/* Mark the region at 'addr' (usable size 'sizeB', redzone size 'rzB',
   zero-filled iff 'is_zeroed') as having been allocated by a custom,
   malloc()-like allocator.  Full semantics are in the long comment above.
   Ignored if addr == 0. */
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)          \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK,       \
                                    addr, sizeB, rzB, is_zeroed, 0)
6473
/* Inform the tool that the block at 'addr', previously announced with
   VALGRIND_MALLOCLIKE_BLOCK, has been resized in place from 'oldSizeB'
   to 'newSizeB' bytes; 'rzB' is the redzone size.  See the comment for
   VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK,    \
                                    addr, oldSizeB, newSizeB, rzB, 0)
6480
/* Inform the tool that the block at 'addr' (redzone size 'rzB'),
   previously announced with VALGRIND_MALLOCLIKE_BLOCK, has been freed.
   See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,         \
                                    addr, rzB, 0, 0, 0)
6487
/* Create a memory pool anchored at 'pool'.  'rzB' is the redzone size
   applied to pieces later allocated from the pool; 'is_zeroed' indicates
   that such pieces are handed out zero-initialised (and hence defined). */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,         \
                                    pool, rzB, is_zeroed, 0, 0)
6492
/* Destroy the memory pool anchored at 'pool', together with any pieces
   still associated with it. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                                  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,        \
                                    pool, 0, 0, 0, 0)
6497
/* Associate the piece of memory at 'addr' (length 'size') with the pool
   anchored at 'pool' -- i.e. announce a pool allocation. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                        \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,          \
                                    pool, addr, size, 0, 0)
6502
/* Disassociate the piece of memory at 'addr' from the pool anchored at
   'pool' -- i.e. announce a pool deallocation. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                               \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,           \
                                    pool, addr, 0, 0, 0)
6507
/* Disassociate from pool 'pool' any pieces lying outside the range
   ['addr', 'addr'+'size'). */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,           \
                                    pool, addr, size, 0, 0)
6512
/* Inform the tool that the pool previously anchored at address 'poolA'
   has moved to anchor address 'poolB'; its associated pieces move with
   it.  (The former comment here -- "resize and/or move a piece" --
   described VALGRIND_MEMPOOL_CHANGE, not this request.) */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,           \
                                    poolA, poolB, 0, 0, 0)
6517
/* Resize and/or move a piece associated with the pool anchored at
   'pool': the piece previously at 'addrA' is now at 'addrB' with length
   'size'. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)               \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,         \
                                    pool, addrA, addrB, size, 0)
6522
/* Return 1 if a mempool anchored at 'pool' exists, else 0.
   (Expression form; the default 0 is returned when not running under
   Valgrind.) */
#define VALGRIND_MEMPOOL_EXISTS(pool)                                   \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                               VG_USERREQ__MEMPOOL_EXISTS,              \
                               pool, 0, 0, 0, 0)
6528
/* Mark a piece of memory as being a stack.  Returns a stack id that can
   later be passed to VALGRIND_STACK_DEREGISTER / VALGRIND_STACK_CHANGE.
   'start' is the lowest addressable stack byte, 'end' is the highest
   addressable stack byte. */
#define VALGRIND_STACK_REGISTER(start, end)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                               VG_USERREQ__STACK_REGISTER,              \
                               start, end, 0, 0, 0)
6536
/* Unmark the piece of memory associated with stack id 'id' (as returned
   by VALGRIND_STACK_REGISTER) as being a stack. */
#define VALGRIND_STACK_DEREGISTER(id)                                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER,       \
                                    id, 0, 0, 0, 0)
6542
/* Change the start and end address of the stack id 'id'.
   'start' is the new lowest addressable stack byte, 'end' is the new
   highest addressable stack byte. */
#define VALGRIND_STACK_CHANGE(id, start, end)                           \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,           \
                                    id, start, end, 0, 0)
6549
/* Load PDB debug info for a Wine PE image mapping: 'fd' is the file
   descriptor of the PDB, 'ptr'/'total_size' describe the mapped image,
   and 'delta' is the load offset. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO,     \
                                    fd, ptr, total_size, delta, 0)
6554
/* Map a code address 'addr' to a source file name and line number.
   'buf64' must point to a 64-byte buffer in the caller's address space.
   The result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                          \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,            \
                               addr, buf64, 0, 0, 0)
6563
/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled. */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
    /* +1 increments this thread's error-disablement count */           \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    1, 0, 0, 0, 0)
6575
/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING. */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
    /* -1 decrements this thread's error-disablement count */           \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    -1, 0, 0, 0, 0)
6581
/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Returns 1 if command not recognised, 0 otherwise.
   (Expression form; evaluates to the default 0 when not running under
   Valgrind.) */
#define VALGRIND_MONITOR_COMMAND(command)                               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
                                    command, 0, 0, 0, 0)
6590
6591
/* The PLAT_* platform-selection macros are internal to this header;
   undefine them so they do not leak into including translation units. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
6605
6606 #endif /* __VALGRIND_H */
6607