/**
 * Tencent is pleased to support the open source community by making MSEC available.
 *
 * Copyright (C) 2016 THL A29 Limited, a Tencent company. All rights reserved.
 *
 * Licensed under the GNU General Public License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License. You may
 * obtain a copy of the License at
 *
 * https://opensource.org/licenses/GPL-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the
 * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */


/* -*- c -*-
   ----------------------------------------------------------------

   Notice that the following BSD-style license applies to this one
   file (valgrind.h) only.  The rest of Valgrind is licensed under the
   terms of the GNU General Public License, version 2, unless
   otherwise indicated.  See the COPYING file in the source
   distribution for details.

   ----------------------------------------------------------------

   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2000-2013 Julian Seward.  All rights reserved.

   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions
   are met:

   1. Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.

   2. The origin of this software must not be misrepresented; you must
      not claim that you wrote the original software.  If you use this
      software in a product, an acknowledgment in the product
      documentation would be appreciated but is not required.

   3. Altered source versions must be plainly marked as such, and must
      not be misrepresented as being the original software.

   4. The name of the author may not be used to endorse or promote
      products derived from this software without specific prior written
      permission.

   THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
   OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
   WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
   DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
   DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
   GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
   WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

   ----------------------------------------------------------------

   Notice that the above BSD-style license applies to this one file
   (valgrind.h) only.  The entire rest of Valgrind is licensed under
   the terms of the GNU General Public License, version 2.  See the
   COPYING file in the source distribution for details.

   ----------------------------------------------------------------
*/


/* This file is for inclusion into client (your!) code.

   You can use these macros to manipulate and query Valgrind's
   execution inside your own programs.

   The resulting executables will still run without Valgrind, just a
   little bit more slowly than they otherwise would, but otherwise
   unchanged.  When not running on valgrind, each client request
   consumes very few (eg. 7) instructions, so the resulting performance
   loss is negligible unless you plan to execute client requests
   millions of times per second.  Nevertheless, if that is still a
   problem, you can compile with the NVALGRIND symbol defined (gcc
   -DNVALGRIND) so that client requests are not even compiled in. */
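
/* Illustrative sketch (not part of the original header text): a minimal
   client program using this header.  It relies on the RUNNING_ON_VALGRIND
   macro, which is defined further down in valgrind.h, plus plain stdio:

      #include <stdio.h>
      #include "valgrind.h"

      int main(void)
      {
         if (RUNNING_ON_VALGRIND)
            printf("running under Valgrind (or a tool built on it)\n");
         else
            printf("running natively\n");
         return 0;
      }

   Built and run without Valgrind, the program still works; the macro
   simply evaluates to 0 on the real CPU. */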

#ifndef __VALGRIND_H
#define __VALGRIND_H


/* ------------------------------------------------------------------ */
/* VERSION NUMBER OF VALGRIND                                          */
/* ------------------------------------------------------------------ */

/* Specify Valgrind's version number, so that user code can
   conditionally compile based on our version number.  Note that these
   were introduced at version 3.6 and so do not exist in version 3.5
   or earlier.  The recommended way to use them to check for "version
   X.Y or later" is (eg)

#if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__)   \
    && (__VALGRIND_MAJOR__ > 3                                   \
        || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
#define __VALGRIND_MAJOR__    3
#define __VALGRIND_MINOR__    10


#include <stdarg.h>

/* Nb: this file might be included in a file compiled with -ansi.  So
   we can't use C++ style "//" comments nor the "asm" keyword (instead
   use "__asm__"). */

/* Derive some tags indicating what the target platform is.  Note
   that in this file we're using the compiler's CPP symbols for
   identifying architectures, which are different to the ones we use
   within the rest of Valgrind.  Note, __powerpc__ is active for both
   32 and 64-bit PPC, whereas __powerpc64__ is only active for the
   latter (on Linux, that is).

   Misc note: how to find out what's predefined in gcc by default:
   gcc -Wp,-dM somefile.c
*/
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux


#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif (defined(__MINGW32__) && !defined(__MINGW64__)) \
      || defined(__CYGWIN32__) \
      || (defined(_WIN32) && defined(_M_IX86))
#  define PLAT_x86_win32 1
#elif defined(__MINGW64__) \
      || (defined(_WIN64) && defined(_M_X64))
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__)
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
/* Big Endian uses ELF version 1 */
#  define PLAT_ppc64be_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
/* Little Endian uses ELF version 2 */
#  define PLAT_ppc64le_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
#  define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips!=64)
#  define PLAT_mips32_linux 1
#else
/* If we're not compiling for our target platform, don't generate
   any inline asms. */
#  if !defined(NVALGRIND)
#    define NVALGRIND 1
#  endif
#endif


/* ------------------------------------------------------------------ */
/* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS.  There is nothing  */
/* in here of use to end-users -- skip to the next section.            */
/* ------------------------------------------------------------------ */

/*
 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
 * request.  Accepts both pointers and integers as arguments.
 *
 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
 * client request that does not return a value.
 *
 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
 * client request and whose value equals the client request result.  Accepts
 * both pointers and integers as arguments.  Note that such calls are not
 * necessarily pure functions -- they may have side effects.
 */

#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                           _zzq_arg2,  _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
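
/* Illustrative sketch (not part of the original header): issuing a
   tool-specific client request through the macros above.  The request code
   VG_USERREQ__EXAMPLE and the values addr/len are hypothetical placeholders;
   real request codes come from a tool header such as memcheck.h.  The first
   argument of the _EXPR form (here 0) is the value the expression yields
   when the program is not running under Valgrind.

      unsigned long res;
      res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__EXAMPLE,
                                            addr, len, 0, 0, 0);
      VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__EXAMPLE,
                                      addr, len, 0, 0, 0);
*/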

#if defined(NVALGRIND)

/* Define NVALGRIND to completely remove the Valgrind magic sequence
   from the compiled code (analogous to NDEBUG's effects on
   assert()) */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
      (_zzq_default)

#else  /* ! NVALGRIND */

/* The following defines the magic code sequences which the JITter
   spots and handles magically.  Don't look too closely at them as
   they will rot your brain.

   The assembly code sequences for all architectures is in this one
   file.  This is because this file must be stand-alone, and we don't
   want to have multiple files.

   For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
   value gets put in the return slot, so that everything works when
   this is executed not under Valgrind.  Args are passed in a memory
   block, and so there's no intrinsic limit to the number that could
   be passed, but it's currently five.

   The macro args are:
      _zzq_rlval    result lvalue
      _zzq_default  default value (result returned when running on real CPU)
      _zzq_request  request code
      _zzq_arg1..5  request params

   The other two macros are used to support function wrapping, and are
   a lot simpler.  VALGRIND_GET_NR_CONTEXT returns the value of the
   guest's NRADDR pseudo-register and whatever other information is
   needed to safely run the call original from the wrapper: on
   ppc64-linux, the R2 value at the divert point is also needed.  This
   information is abstracted into a user-visible type, OrigFn.

   VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
   guest, but guarantees that the branch instruction will not be
   redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
   branch-and-link-to-r11.  VALGRIND_CALL_NOREDIR is just text, not a
   complete inline asm, since it needs to be combined with more magic
   inline asm stuff to be useful.
*/

/* ------------------------- x86-{linux,darwin} ---------------- */

#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)  \
    ||  (defined(PLAT_x86_win32) && defined(__GNUC__))

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3, %%edi ; roll $13, %%edi\n\t"       \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgl %%edi,%%edi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */

/* ------------------------- x86-Win32 ------------------------- */

#if defined(PLAT_x86_win32) && !defined(__GNUC__)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     __asm rol edi, 3  __asm rol edi, 13          \
                     __asm rol edi, 29 __asm rol edi, 19

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default),    \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1),        \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3),           \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm { __SPECIAL_INSTRUCTION_PREAMBLE                        \
            /* %EAX = guest_NRADDR */                             \
            __asm xchg ecx,ecx                                    \
            __asm mov __addr, eax                                 \
    }                                                             \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm { __SPECIAL_INSTRUCTION_PREAMBLE                        \
            __asm xchg edi,edi                                    \
    }                                                             \
 } while (0)

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */

/* ------------------------ amd64-{linux,darwin} --------------- */

#if defined(PLAT_amd64_linux)  ||  defined(PLAT_amd64_darwin) \
    ||  (defined(PLAT_amd64_win64) && defined(__GNUC__))

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3, %%rdi ; rolq $13, %%rdi\n\t"       \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    __extension__                                                 \
    ({ volatile unsigned long long int _zzq_args[6];              \
       volatile unsigned long long int _zzq_result;               \
       _zzq_args[0] = (unsigned long long int)(_zzq_request);     \
       _zzq_args[1] = (unsigned long long int)(_zzq_arg1);        \
       _zzq_args[2] = (unsigned long long int)(_zzq_arg2);        \
       _zzq_args[3] = (unsigned long long int)(_zzq_arg3);        \
       _zzq_args[4] = (unsigned long long int)(_zzq_arg4);        \
       _zzq_args[5] = (unsigned long long int)(_zzq_arg5);        \
       __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE            \
                        /* %RDX = client_request ( %RAX ) */      \
                        "xchgq %%rbx,%%rbx"                       \
                        : "=d" (_zzq_result)                      \
                        : "a" (&_zzq_args[0]), "0" (_zzq_default) \
                        : "cc", "memory"                          \
                       );                                         \
       _zzq_result;                                               \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long long int __addr;                       \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RAX = guest_NRADDR */                    \
                     "xchgq %%rcx,%%rcx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgq %%rdi,%%rdi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || (PLAT_amd64_win64 && __GNUC__) */

/* ------------------------- amd64-Win64 ------------------------- */

#if defined(PLAT_amd64_win64) && !defined(__GNUC__)

#error Unsupported compiler.

#endif /* PLAT_amd64_win64 */

/* ------------------------ ppc32-linux ------------------------ */

#if defined(PLAT_ppc32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
                     "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({         unsigned int  _zzq_args[6];                          \
             unsigned int  _zzq_result;                           \
             unsigned int* _zzq_ptr;                              \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc32_linux */

/* ------------------------ ppc64-linux ------------------------ */

#if defined(PLAT_ppc64be_linux)

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
      unsigned long long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({         unsigned long long int  _zzq_args[6];                \
             unsigned long long int  _zzq_result;                 \
             unsigned long long int* _zzq_ptr;                    \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long long int __addr;                                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64be_linux */

#if defined(PLAT_ppc64le_linux)

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
      unsigned long long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({         unsigned long long int  _zzq_args[6];                \
             unsigned long long int  _zzq_result;                 \
             unsigned long long int* _zzq_ptr;                    \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long long int __addr;                                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R12 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64le_linux */

/* ------------------------- arm-linux ------------------------- */

#if defined(PLAT_arm_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t"   \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("mov r3, %1\n\t" /*default*/                 \
                     "mov r4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = client_request ( R4 ) */             \
                     "orr r10, r10, r10\n\t"                      \
                     "mov %0, r3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "r3", "r4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = guest_NRADDR */                      \
                     "orr r11, r11, r11\n\t"                      \
                     "mov %0, r3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr r9, r9, r9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm_linux */

/* ------------------------ arm64-linux ------------------------- */

#if defined(PLAT_arm64_linux)

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "ror x12, x12, #3 ; ror x12, x12, #13 \n\t"           \
            "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned long long int _zzq_args[6];                 \
    volatile unsigned long long int _zzq_result;                  \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);        \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);           \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);           \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);           \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);           \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);           \
    __asm__ volatile("mov x3, %1\n\t" /*default*/                 \
                     "mov x4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = client_request ( X4 ) */             \
                     "orr x10, x10, x10\n\t"                      \
                     "mov %0, x3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "x3", "x4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long long int __addr;                                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = guest_NRADDR */                      \
                     "orr x11, x11, x11\n\t"                      \
                     "mov %0, x3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "x3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir X8 */          \
                     "orr x12, x12, x12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr x9, x9, x9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm64_linux */

/* ------------------------ s390x-linux ------------------------ */

#if defined(PLAT_s390x_linux)

typedef
   struct {
      unsigned long long int nraddr; /* where's the code? */
   }
   OrigFn;

/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code. This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_decoder.c).
 */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "lr 15,15\n\t"                               \
                     "lr 1,1\n\t"                                 \
                     "lr 2,2\n\t"                                 \
                     "lr 3,3\n\t"

#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
 ({volatile unsigned long long int _zzq_args[6];                  \
   volatile unsigned long long int _zzq_result;                   \
   _zzq_args[0] = (unsigned long long int)(_zzq_request);         \
   _zzq_args[1] = (unsigned long long int)(_zzq_arg1);            \
   _zzq_args[2] = (unsigned long long int)(_zzq_arg2);            \
   _zzq_args[3] = (unsigned long long int)(_zzq_arg3);            \
   _zzq_args[4] = (unsigned long long int)(_zzq_arg4);            \
   _zzq_args[5] = (unsigned long long int)(_zzq_arg5);            \
   __asm__ volatile(/* r2 = args */                               \
                    "lgr 2,%1\n\t"                                \
                    /* r3 = default */                            \
                    "lgr 3,%2\n\t"                                \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    __CLIENT_REQUEST_CODE                         \
                    /* results = r3 */                            \
                    "lgr %0, 3\n\t"                               \
                    : "=d" (_zzq_result)                          \
                    : "a" (&_zzq_args[0]), "0" (_zzq_default)     \
                    : "cc", "2", "3", "memory"                    \
                   );                                             \
   _zzq_result;                                                   \
 })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                    \
   volatile unsigned long long int __addr;                        \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                    __GET_NR_CONTEXT_CODE                         \
                    "lgr %0, 3\n\t"                               \
                    : "=a" (__addr)                               \
                    :                                             \
                    : "cc", "3", "memory"                         \
                   );                                             \
   _zzq_orig->nraddr = __addr;                                    \
 }

#define VALGRIND_CALL_NOREDIR_R1                                  \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     __VEX_INJECT_IR_CODE);                       \
 } while (0)

#endif /* PLAT_s390x_linux */

/* ------------------------- mips32-linux ---------------- */

#if defined(PLAT_mips32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* .word 0x342
 * .word 0x742
 * .word 0xC2
 * .word 0x4C2*/
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "srl $0, $0, 13\n\t"                         \
                     "srl $0, $0, 29\n\t"                         \
                     "srl $0, $0, 3\n\t"                          \
                     "srl $0, $0, 19\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("move $11, %1\n\t" /*default*/               \
                     "move $12, %2\n\t" /*ptr*/                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* T3 = client_request ( T4 ) */             \
                     "or $13, $13, $13\n\t"                       \
                     "move %0, $11\n\t"     /*result*/            \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "$11", "$12");                             \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %t9 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11"                                      \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%t9 */                      \
                     "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or $11, $11, $11\n\t"                       \
                    );                                            \
 } while (0)


#endif /* PLAT_mips32_linux */

/* ------------------------- mips64-linux ---------------- */

#if defined(PLAT_mips64_linux)

typedef
   struct {
      unsigned long long nraddr; /* where's the code? */
   }
   OrigFn;

/* dsll $0,$0, 3
 * dsll $0,$0, 13
 * dsll $0,$0, 29
 * dsll $0,$0, 19*/
#define __SPECIAL_INSTRUCTION_PREAMBLE                              \
                     "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"            \
                     "dsll $0,$0,29 ; dsll $0,$0,19\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                            \
       _zzq_default, _zzq_request,                                  \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)       \
  __extension__                                                     \
  ({ volatile unsigned long long int _zzq_args[6];                  \
    volatile unsigned long long int _zzq_result;                    \
    _zzq_args[0] = (unsigned long long int)(_zzq_request);          \
    _zzq_args[1] = (unsigned long long int)(_zzq_arg1);             \
    _zzq_args[2] = (unsigned long long int)(_zzq_arg2);             \
    _zzq_args[3] = (unsigned long long int)(_zzq_arg3);             \
    _zzq_args[4] = (unsigned long long int)(_zzq_arg4);             \
    _zzq_args[5] = (unsigned long long int)(_zzq_arg5);             \
    __asm__ volatile("move $11, %1\n\t" /*default*/                 \
                     "move $12, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE                 \
                     /* $11 = client_request ( $12 ) */             \
                     "or $13, $13, $13\n\t"                         \
                     "move %0, $11\n\t"     /*result*/              \
                     : "=r" (_zzq_result)                           \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])      \
                     : "$11", "$12");                               \
    _zzq_result;                                                    \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                         \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                     \
    volatile unsigned long long int __addr;                         \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                     /* $11 = guest_NRADDR */                       \
                     "or $14, $14, $14\n\t"                         \
                     "move %0, $11"     /*result*/                  \
                     : "=r" (__addr)                                \
                     :                                              \
                     : "$11");                                      \
    _zzq_orig->nraddr = __addr;                                     \
  }

#define VALGRIND_CALL_NOREDIR_T9                                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE                 \
                     /* call-noredir $25 */                         \
                     "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR()                                    \
 do {                                                               \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                     "or $11, $11, $11\n\t"                         \
                    );                                              \
 } while (0)

#endif /* PLAT_mips64_linux */

/* Insert assembly code for other platforms here... */

#endif /* NVALGRIND */


/* ------------------------------------------------------------------ */
/* PLATFORM SPECIFICS for FUNCTION WRAPPING.  This is all very         */
/* ugly.  It's the least-worst tradeoff I can think of.                */
/* ------------------------------------------------------------------ */

/* This section defines magic (a.k.a appalling-hack) macros for doing
   guaranteed-no-redirection macros, so as to get from function
   wrappers to the functions they are wrapping.  The whole point is to
   construct standard call sequences, but to do the call itself with a
   special no-redirect call pseudo-instruction that the JIT
   understands and handles specially.  This section is long and
   repetitious, and I can't see a way to make it shorter.

   The naming scheme is as follows:

      CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}

   'W' stands for "word" and 'v' for "void".  Hence there are
   different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
   and for each, the possibility of returning a word-typed result, or
   no result.
*/

/* Use these to write the name of your wrapper.  NOTE: duplicates
   VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h.  NOTE also: inserts
   the default behaviour equivalence class tag "0000" into the name.
   See pub_tool_redir.h for details -- normally you don't need to
   think about this, though. */

/* Use an extra level of macroisation so as to ensure the soname/fnname
   args are fully macro-expanded before pasting them together. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)

/* Use this macro from within a wrapper function to collect the
   context (address and possibly other info) of the original function.
   Once you have that you can then use it in one of the CALL_FN_
   macros.  The type of the argument _lval is OrigFn. */
#define VALGRIND_GET_ORIG_FN(_lval)  VALGRIND_GET_NR_CONTEXT(_lval)
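
/* Illustrative sketch (not part of the original header): a wrapper for a
   function  int foo(int)  exported by a shared object "libfoo.so".  It uses
   the CALL_FN_W_W macro defined further down this file to call the real foo.
   The Z-encoded soname "libfooZdsoZa" is a hypothetical example standing for
   "libfoo.so.*" ('Zd' encodes '.', 'Za' encodes '*'); see pub_tool_redir.h
   for the full encoding rules.

      int I_WRAP_SONAME_FNNAME_ZU(libfooZdsoZa, foo)(int x)
      {
         int    result;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);
         CALL_FN_W_W(result, fn, x);
         return result;
      }
*/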
1106*a9643ea8Slogwang
1107*a9643ea8Slogwang /* Also provide end-user facilities for function replacement, rather
1108*a9643ea8Slogwang than wrapping. A replacement function differs from a wrapper in
1109*a9643ea8Slogwang that it has no way to get hold of the original function being
1110*a9643ea8Slogwang called, and hence no way to call onwards to it. In a replacement
1111*a9643ea8Slogwang function, VALGRIND_GET_ORIG_FN always returns zero. */
1112*a9643ea8Slogwang
1113*a9643ea8Slogwang #define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname) \
1114*a9643ea8Slogwang VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)
1115*a9643ea8Slogwang
1116*a9643ea8Slogwang #define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname) \
1117*a9643ea8Slogwang VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
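
/* For illustration only: a replacement for the same hypothetical
   "foo" might look like this. Being a replacement rather than a
   wrapper, it has no OrigFn to call onwards to:

      int I_REPLACE_SONAME_FNNAME_ZU(libfooZdsoZd0, foo) ( int x )
      {
         // hypothetical stand-in behaviour; the original foo is not
         // reachable from here
         return x + 1;
      }
*/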
1118*a9643ea8Slogwang
1119*a9643ea8Slogwang /* Derivatives of the main macros below, for calling functions
1120*a9643ea8Slogwang returning void. */
1121*a9643ea8Slogwang
1122*a9643ea8Slogwang #define CALL_FN_v_v(fnptr) \
1123*a9643ea8Slogwang do { volatile unsigned long _junk; \
1124*a9643ea8Slogwang CALL_FN_W_v(_junk,fnptr); } while (0)
1125*a9643ea8Slogwang
1126*a9643ea8Slogwang #define CALL_FN_v_W(fnptr, arg1) \
1127*a9643ea8Slogwang do { volatile unsigned long _junk; \
1128*a9643ea8Slogwang CALL_FN_W_W(_junk,fnptr,arg1); } while (0)
1129*a9643ea8Slogwang
1130*a9643ea8Slogwang #define CALL_FN_v_WW(fnptr, arg1,arg2) \
1131*a9643ea8Slogwang do { volatile unsigned long _junk; \
1132*a9643ea8Slogwang CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)
1133*a9643ea8Slogwang
1134*a9643ea8Slogwang #define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3) \
1135*a9643ea8Slogwang do { volatile unsigned long _junk; \
1136*a9643ea8Slogwang CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)
1137*a9643ea8Slogwang
1138*a9643ea8Slogwang #define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \
1139*a9643ea8Slogwang do { volatile unsigned long _junk; \
1140*a9643ea8Slogwang CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)
1141*a9643ea8Slogwang
1142*a9643ea8Slogwang #define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \
1143*a9643ea8Slogwang do { volatile unsigned long _junk; \
1144*a9643ea8Slogwang CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)
1145*a9643ea8Slogwang
1146*a9643ea8Slogwang #define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \
1147*a9643ea8Slogwang do { volatile unsigned long _junk; \
1148*a9643ea8Slogwang CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)
1149*a9643ea8Slogwang
1150*a9643ea8Slogwang #define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \
1151*a9643ea8Slogwang do { volatile unsigned long _junk; \
1152*a9643ea8Slogwang CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
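
/* For illustration only: wrapping a hypothetical
   "void bar ( int, int )" would use CALL_FN_v_WW, which makes the
   call and discards the junk result word:

      void I_WRAP_SONAME_FNNAME_ZU(libfooZdsoZd0, bar) ( int x, int y )
      {
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);
         CALL_FN_v_WW(fn, x, y);   // no result to collect
      }
*/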
1153*a9643ea8Slogwang
1154*a9643ea8Slogwang /* ------------------------- x86-{linux,darwin} ---------------- */
1155*a9643ea8Slogwang
1156*a9643ea8Slogwang #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
1157*a9643ea8Slogwang
1158*a9643ea8Slogwang /* These regs are trashed by the hidden call. No need to mention eax
1159*a9643ea8Slogwang as gcc can already see that; mentioning it also makes gcc bomb out. */
1160*a9643ea8Slogwang #define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
1161*a9643ea8Slogwang
1162*a9643ea8Slogwang /* Macros to save and align the stack before making a function
1163*a9643ea8Slogwang call and restore it afterwards as gcc may not keep the stack
1164*a9643ea8Slogwang pointer aligned if it doesn't realise calls are being made
1165*a9643ea8Slogwang to other functions. */
1166*a9643ea8Slogwang
1167*a9643ea8Slogwang #define VALGRIND_ALIGN_STACK \
1168*a9643ea8Slogwang "movl %%esp,%%edi\n\t" \
1169*a9643ea8Slogwang "andl $0xfffffff0,%%esp\n\t"
1170*a9643ea8Slogwang #define VALGRIND_RESTORE_STACK \
1171*a9643ea8Slogwang "movl %%edi,%%esp\n\t"
1172*a9643ea8Slogwang
1173*a9643ea8Slogwang /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
1174*a9643ea8Slogwang long) == 4. */
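
/* In each of these macros, _argvec[0] holds the no-redirect target
   address and _argvec[1..n] hold the arguments. The arguments are
   pushed from the highest _argvec slot downwards, giving the usual
   right-to-left cdecl ordering, and the initial "subl $N, %esp"
   (where present) pads the frame so that the total amount pushed is a
   multiple of 16 bytes, preserving the alignment established by
   VALGRIND_ALIGN_STACK. */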
1175*a9643ea8Slogwang
1176*a9643ea8Slogwang #define CALL_FN_W_v(lval, orig) \
1177*a9643ea8Slogwang do { \
1178*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1179*a9643ea8Slogwang volatile unsigned long _argvec[1]; \
1180*a9643ea8Slogwang volatile unsigned long _res; \
1181*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1182*a9643ea8Slogwang __asm__ volatile( \
1183*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1184*a9643ea8Slogwang "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1185*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_EAX \
1186*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1187*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1188*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) \
1189*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1190*a9643ea8Slogwang ); \
1191*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1192*a9643ea8Slogwang } while (0)
1193*a9643ea8Slogwang
1194*a9643ea8Slogwang #define CALL_FN_W_W(lval, orig, arg1) \
1195*a9643ea8Slogwang do { \
1196*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1197*a9643ea8Slogwang volatile unsigned long _argvec[2]; \
1198*a9643ea8Slogwang volatile unsigned long _res; \
1199*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1200*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1201*a9643ea8Slogwang __asm__ volatile( \
1202*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1203*a9643ea8Slogwang "subl $12, %%esp\n\t" \
1204*a9643ea8Slogwang "pushl 4(%%eax)\n\t" \
1205*a9643ea8Slogwang "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1206*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_EAX \
1207*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1208*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1209*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) \
1210*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1211*a9643ea8Slogwang ); \
1212*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1213*a9643ea8Slogwang } while (0)
1214*a9643ea8Slogwang
1215*a9643ea8Slogwang #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1216*a9643ea8Slogwang do { \
1217*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1218*a9643ea8Slogwang volatile unsigned long _argvec[3]; \
1219*a9643ea8Slogwang volatile unsigned long _res; \
1220*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1221*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1222*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1223*a9643ea8Slogwang __asm__ volatile( \
1224*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1225*a9643ea8Slogwang "subl $8, %%esp\n\t" \
1226*a9643ea8Slogwang "pushl 8(%%eax)\n\t" \
1227*a9643ea8Slogwang "pushl 4(%%eax)\n\t" \
1228*a9643ea8Slogwang "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1229*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_EAX \
1230*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1231*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1232*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) \
1233*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1234*a9643ea8Slogwang ); \
1235*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1236*a9643ea8Slogwang } while (0)
1237*a9643ea8Slogwang
1238*a9643ea8Slogwang #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1239*a9643ea8Slogwang do { \
1240*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1241*a9643ea8Slogwang volatile unsigned long _argvec[4]; \
1242*a9643ea8Slogwang volatile unsigned long _res; \
1243*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1244*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1245*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1246*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
1247*a9643ea8Slogwang __asm__ volatile( \
1248*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1249*a9643ea8Slogwang "subl $4, %%esp\n\t" \
1250*a9643ea8Slogwang "pushl 12(%%eax)\n\t" \
1251*a9643ea8Slogwang "pushl 8(%%eax)\n\t" \
1252*a9643ea8Slogwang "pushl 4(%%eax)\n\t" \
1253*a9643ea8Slogwang "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1254*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_EAX \
1255*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1256*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1257*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) \
1258*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1259*a9643ea8Slogwang ); \
1260*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1261*a9643ea8Slogwang } while (0)
1262*a9643ea8Slogwang
1263*a9643ea8Slogwang #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1264*a9643ea8Slogwang do { \
1265*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1266*a9643ea8Slogwang volatile unsigned long _argvec[5]; \
1267*a9643ea8Slogwang volatile unsigned long _res; \
1268*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1269*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1270*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1271*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
1272*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
1273*a9643ea8Slogwang __asm__ volatile( \
1274*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1275*a9643ea8Slogwang "pushl 16(%%eax)\n\t" \
1276*a9643ea8Slogwang "pushl 12(%%eax)\n\t" \
1277*a9643ea8Slogwang "pushl 8(%%eax)\n\t" \
1278*a9643ea8Slogwang "pushl 4(%%eax)\n\t" \
1279*a9643ea8Slogwang "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1280*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_EAX \
1281*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1282*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1283*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) \
1284*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1285*a9643ea8Slogwang ); \
1286*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1287*a9643ea8Slogwang } while (0)
1288*a9643ea8Slogwang
1289*a9643ea8Slogwang #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1290*a9643ea8Slogwang do { \
1291*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1292*a9643ea8Slogwang volatile unsigned long _argvec[6]; \
1293*a9643ea8Slogwang volatile unsigned long _res; \
1294*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1295*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1296*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1297*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
1298*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
1299*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
1300*a9643ea8Slogwang __asm__ volatile( \
1301*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1302*a9643ea8Slogwang "subl $12, %%esp\n\t" \
1303*a9643ea8Slogwang "pushl 20(%%eax)\n\t" \
1304*a9643ea8Slogwang "pushl 16(%%eax)\n\t" \
1305*a9643ea8Slogwang "pushl 12(%%eax)\n\t" \
1306*a9643ea8Slogwang "pushl 8(%%eax)\n\t" \
1307*a9643ea8Slogwang "pushl 4(%%eax)\n\t" \
1308*a9643ea8Slogwang "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1309*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_EAX \
1310*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1311*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1312*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) \
1313*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1314*a9643ea8Slogwang ); \
1315*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1316*a9643ea8Slogwang } while (0)
1317*a9643ea8Slogwang
1318*a9643ea8Slogwang #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1319*a9643ea8Slogwang do { \
1320*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1321*a9643ea8Slogwang volatile unsigned long _argvec[7]; \
1322*a9643ea8Slogwang volatile unsigned long _res; \
1323*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1324*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1325*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1326*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
1327*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
1328*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
1329*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
1330*a9643ea8Slogwang __asm__ volatile( \
1331*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1332*a9643ea8Slogwang "subl $8, %%esp\n\t" \
1333*a9643ea8Slogwang "pushl 24(%%eax)\n\t" \
1334*a9643ea8Slogwang "pushl 20(%%eax)\n\t" \
1335*a9643ea8Slogwang "pushl 16(%%eax)\n\t" \
1336*a9643ea8Slogwang "pushl 12(%%eax)\n\t" \
1337*a9643ea8Slogwang "pushl 8(%%eax)\n\t" \
1338*a9643ea8Slogwang "pushl 4(%%eax)\n\t" \
1339*a9643ea8Slogwang "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1340*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_EAX \
1341*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1342*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1343*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) \
1344*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1345*a9643ea8Slogwang ); \
1346*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1347*a9643ea8Slogwang } while (0)
1348*a9643ea8Slogwang
1349*a9643ea8Slogwang #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1350*a9643ea8Slogwang arg7) \
1351*a9643ea8Slogwang do { \
1352*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1353*a9643ea8Slogwang volatile unsigned long _argvec[8]; \
1354*a9643ea8Slogwang volatile unsigned long _res; \
1355*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1356*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1357*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1358*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
1359*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
1360*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
1361*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
1362*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
1363*a9643ea8Slogwang __asm__ volatile( \
1364*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1365*a9643ea8Slogwang "subl $4, %%esp\n\t" \
1366*a9643ea8Slogwang "pushl 28(%%eax)\n\t" \
1367*a9643ea8Slogwang "pushl 24(%%eax)\n\t" \
1368*a9643ea8Slogwang "pushl 20(%%eax)\n\t" \
1369*a9643ea8Slogwang "pushl 16(%%eax)\n\t" \
1370*a9643ea8Slogwang "pushl 12(%%eax)\n\t" \
1371*a9643ea8Slogwang "pushl 8(%%eax)\n\t" \
1372*a9643ea8Slogwang "pushl 4(%%eax)\n\t" \
1373*a9643ea8Slogwang "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1374*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_EAX \
1375*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1376*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1377*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) \
1378*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1379*a9643ea8Slogwang ); \
1380*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1381*a9643ea8Slogwang } while (0)
1382*a9643ea8Slogwang
1383*a9643ea8Slogwang #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1384*a9643ea8Slogwang arg7,arg8) \
1385*a9643ea8Slogwang do { \
1386*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1387*a9643ea8Slogwang volatile unsigned long _argvec[9]; \
1388*a9643ea8Slogwang volatile unsigned long _res; \
1389*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1390*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1391*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1392*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
1393*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
1394*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
1395*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
1396*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
1397*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
1398*a9643ea8Slogwang __asm__ volatile( \
1399*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1400*a9643ea8Slogwang "pushl 32(%%eax)\n\t" \
1401*a9643ea8Slogwang "pushl 28(%%eax)\n\t" \
1402*a9643ea8Slogwang "pushl 24(%%eax)\n\t" \
1403*a9643ea8Slogwang "pushl 20(%%eax)\n\t" \
1404*a9643ea8Slogwang "pushl 16(%%eax)\n\t" \
1405*a9643ea8Slogwang "pushl 12(%%eax)\n\t" \
1406*a9643ea8Slogwang "pushl 8(%%eax)\n\t" \
1407*a9643ea8Slogwang "pushl 4(%%eax)\n\t" \
1408*a9643ea8Slogwang "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1409*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_EAX \
1410*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1411*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1412*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) \
1413*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1414*a9643ea8Slogwang ); \
1415*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1416*a9643ea8Slogwang } while (0)
1417*a9643ea8Slogwang
1418*a9643ea8Slogwang #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1419*a9643ea8Slogwang arg7,arg8,arg9) \
1420*a9643ea8Slogwang do { \
1421*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1422*a9643ea8Slogwang volatile unsigned long _argvec[10]; \
1423*a9643ea8Slogwang volatile unsigned long _res; \
1424*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1425*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1426*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1427*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
1428*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
1429*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
1430*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
1431*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
1432*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
1433*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
1434*a9643ea8Slogwang __asm__ volatile( \
1435*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1436*a9643ea8Slogwang "subl $12, %%esp\n\t" \
1437*a9643ea8Slogwang "pushl 36(%%eax)\n\t" \
1438*a9643ea8Slogwang "pushl 32(%%eax)\n\t" \
1439*a9643ea8Slogwang "pushl 28(%%eax)\n\t" \
1440*a9643ea8Slogwang "pushl 24(%%eax)\n\t" \
1441*a9643ea8Slogwang "pushl 20(%%eax)\n\t" \
1442*a9643ea8Slogwang "pushl 16(%%eax)\n\t" \
1443*a9643ea8Slogwang "pushl 12(%%eax)\n\t" \
1444*a9643ea8Slogwang "pushl 8(%%eax)\n\t" \
1445*a9643ea8Slogwang "pushl 4(%%eax)\n\t" \
1446*a9643ea8Slogwang "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1447*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_EAX \
1448*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1449*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1450*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) \
1451*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1452*a9643ea8Slogwang ); \
1453*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1454*a9643ea8Slogwang } while (0)
1455*a9643ea8Slogwang
1456*a9643ea8Slogwang #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1457*a9643ea8Slogwang arg7,arg8,arg9,arg10) \
1458*a9643ea8Slogwang do { \
1459*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1460*a9643ea8Slogwang volatile unsigned long _argvec[11]; \
1461*a9643ea8Slogwang volatile unsigned long _res; \
1462*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1463*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1464*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1465*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
1466*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
1467*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
1468*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
1469*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
1470*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
1471*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
1472*a9643ea8Slogwang _argvec[10] = (unsigned long)(arg10); \
1473*a9643ea8Slogwang __asm__ volatile( \
1474*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1475*a9643ea8Slogwang "subl $8, %%esp\n\t" \
1476*a9643ea8Slogwang "pushl 40(%%eax)\n\t" \
1477*a9643ea8Slogwang "pushl 36(%%eax)\n\t" \
1478*a9643ea8Slogwang "pushl 32(%%eax)\n\t" \
1479*a9643ea8Slogwang "pushl 28(%%eax)\n\t" \
1480*a9643ea8Slogwang "pushl 24(%%eax)\n\t" \
1481*a9643ea8Slogwang "pushl 20(%%eax)\n\t" \
1482*a9643ea8Slogwang "pushl 16(%%eax)\n\t" \
1483*a9643ea8Slogwang "pushl 12(%%eax)\n\t" \
1484*a9643ea8Slogwang "pushl 8(%%eax)\n\t" \
1485*a9643ea8Slogwang "pushl 4(%%eax)\n\t" \
1486*a9643ea8Slogwang "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1487*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_EAX \
1488*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1489*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1490*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) \
1491*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1492*a9643ea8Slogwang ); \
1493*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1494*a9643ea8Slogwang } while (0)
1495*a9643ea8Slogwang
1496*a9643ea8Slogwang #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1497*a9643ea8Slogwang arg6,arg7,arg8,arg9,arg10, \
1498*a9643ea8Slogwang arg11) \
1499*a9643ea8Slogwang do { \
1500*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1501*a9643ea8Slogwang volatile unsigned long _argvec[12]; \
1502*a9643ea8Slogwang volatile unsigned long _res; \
1503*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1504*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1505*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1506*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
1507*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
1508*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
1509*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
1510*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
1511*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
1512*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
1513*a9643ea8Slogwang _argvec[10] = (unsigned long)(arg10); \
1514*a9643ea8Slogwang _argvec[11] = (unsigned long)(arg11); \
1515*a9643ea8Slogwang __asm__ volatile( \
1516*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1517*a9643ea8Slogwang "subl $4, %%esp\n\t" \
1518*a9643ea8Slogwang "pushl 44(%%eax)\n\t" \
1519*a9643ea8Slogwang "pushl 40(%%eax)\n\t" \
1520*a9643ea8Slogwang "pushl 36(%%eax)\n\t" \
1521*a9643ea8Slogwang "pushl 32(%%eax)\n\t" \
1522*a9643ea8Slogwang "pushl 28(%%eax)\n\t" \
1523*a9643ea8Slogwang "pushl 24(%%eax)\n\t" \
1524*a9643ea8Slogwang "pushl 20(%%eax)\n\t" \
1525*a9643ea8Slogwang "pushl 16(%%eax)\n\t" \
1526*a9643ea8Slogwang "pushl 12(%%eax)\n\t" \
1527*a9643ea8Slogwang "pushl 8(%%eax)\n\t" \
1528*a9643ea8Slogwang "pushl 4(%%eax)\n\t" \
1529*a9643ea8Slogwang "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1530*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_EAX \
1531*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1532*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1533*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) \
1534*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1535*a9643ea8Slogwang ); \
1536*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1537*a9643ea8Slogwang } while (0)
1538*a9643ea8Slogwang
1539*a9643ea8Slogwang #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1540*a9643ea8Slogwang arg6,arg7,arg8,arg9,arg10, \
1541*a9643ea8Slogwang arg11,arg12) \
1542*a9643ea8Slogwang do { \
1543*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1544*a9643ea8Slogwang volatile unsigned long _argvec[13]; \
1545*a9643ea8Slogwang volatile unsigned long _res; \
1546*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1547*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1548*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1549*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
1550*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
1551*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
1552*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
1553*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
1554*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
1555*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
1556*a9643ea8Slogwang _argvec[10] = (unsigned long)(arg10); \
1557*a9643ea8Slogwang _argvec[11] = (unsigned long)(arg11); \
1558*a9643ea8Slogwang _argvec[12] = (unsigned long)(arg12); \
1559*a9643ea8Slogwang __asm__ volatile( \
1560*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1561*a9643ea8Slogwang "pushl 48(%%eax)\n\t" \
1562*a9643ea8Slogwang "pushl 44(%%eax)\n\t" \
1563*a9643ea8Slogwang "pushl 40(%%eax)\n\t" \
1564*a9643ea8Slogwang "pushl 36(%%eax)\n\t" \
1565*a9643ea8Slogwang "pushl 32(%%eax)\n\t" \
1566*a9643ea8Slogwang "pushl 28(%%eax)\n\t" \
1567*a9643ea8Slogwang "pushl 24(%%eax)\n\t" \
1568*a9643ea8Slogwang "pushl 20(%%eax)\n\t" \
1569*a9643ea8Slogwang "pushl 16(%%eax)\n\t" \
1570*a9643ea8Slogwang "pushl 12(%%eax)\n\t" \
1571*a9643ea8Slogwang "pushl 8(%%eax)\n\t" \
1572*a9643ea8Slogwang "pushl 4(%%eax)\n\t" \
1573*a9643ea8Slogwang "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1574*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_EAX \
1575*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1576*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1577*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) \
1578*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1579*a9643ea8Slogwang ); \
1580*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1581*a9643ea8Slogwang } while (0)
1582*a9643ea8Slogwang
1583*a9643ea8Slogwang #endif /* PLAT_x86_linux || PLAT_x86_darwin */
1584*a9643ea8Slogwang
1585*a9643ea8Slogwang /* ------------------------ amd64-{linux,darwin} --------------- */
1586*a9643ea8Slogwang
1587*a9643ea8Slogwang #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
1588*a9643ea8Slogwang
1589*a9643ea8Slogwang /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1590*a9643ea8Slogwang
1591*a9643ea8Slogwang /* These regs are trashed by the hidden call. */
1592*a9643ea8Slogwang #define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
1593*a9643ea8Slogwang "rdi", "r8", "r9", "r10", "r11"
1594*a9643ea8Slogwang
1595*a9643ea8Slogwang /* This is all pretty complex. It's done this way so as to make stack
1596*a9643ea8Slogwang unwinding work reliably. See bug 243270. The basic problem is the sub
1597*a9643ea8Slogwang and add of 128 to %rsp in all of the following macros. If gcc believes
1598*a9643ea8Slogwang the CFA is in %rsp, then unwinding may fail, because what's at the
1599*a9643ea8Slogwang CFA is not what gcc "expected" when it constructs the CFIs for the
1600*a9643ea8Slogwang places where the macros are instantiated.
1601*a9643ea8Slogwang
1602*a9643ea8Slogwang But we can't just add a CFI annotation to increase the CFA offset
1603*a9643ea8Slogwang by 128, to match the sub of 128 from %rsp, because we don't know
1604*a9643ea8Slogwang whether gcc has chosen %rsp as the CFA at that point, or whether it
1605*a9643ea8Slogwang has chosen some other register (eg, %rbp). In the latter case,
1606*a9643ea8Slogwang adding a CFI annotation to change the CFA offset is simply wrong.
1607*a9643ea8Slogwang
1608*a9643ea8Slogwang So the solution is to get hold of the CFA using
1609*a9643ea8Slogwang __builtin_dwarf_cfa(), put it in a known register, and add a
1610*a9643ea8Slogwang CFI annotation to say what the register is. We choose %rbp for
1611*a9643ea8Slogwang this (perhaps perversely), because:
1612*a9643ea8Slogwang
1613*a9643ea8Slogwang (1) %rbp is already subject to unwinding. If a new register was
1614*a9643ea8Slogwang chosen then the unwinder would have to unwind it in all stack
1615*a9643ea8Slogwang traces, which is expensive, and
1616*a9643ea8Slogwang
1617*a9643ea8Slogwang (2) %rbp is already subject to precise exception updates in the
1618*a9643ea8Slogwang JIT. If a new register was chosen, we'd have to have precise
1619*a9643ea8Slogwang exceptions for it too, which reduces performance of the
1620*a9643ea8Slogwang generated code.
1621*a9643ea8Slogwang
1622*a9643ea8Slogwang However .. one extra complication. We can't just whack the result
1623*a9643ea8Slogwang of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1624*a9643ea8Slogwang list of trashed registers at the end of the inline assembly
1625*a9643ea8Slogwang fragments; gcc won't allow %rbp to appear in that list. Hence
1626*a9643ea8Slogwang instead we need to stash %rbp in %r15 for the duration of the asm,
1627*a9643ea8Slogwang and say that %r15 is trashed instead. gcc seems happy to go with
1628*a9643ea8Slogwang that.
1629*a9643ea8Slogwang
1630*a9643ea8Slogwang Oh .. and this all needs to be conditionalised so that it is
1631*a9643ea8Slogwang unchanged from before this commit, when compiled with older gccs
1632*a9643ea8Slogwang that don't support __builtin_dwarf_cfa. Furthermore, since
1633*a9643ea8Slogwang this header file is freestanding, it has to be independent of
1634*a9643ea8Slogwang config.h, and so the following conditionalisation cannot depend on
1635*a9643ea8Slogwang configure time checks.
1636*a9643ea8Slogwang
1637*a9643ea8Slogwang Although it's not obvious from the expression itself,
1638*a9643ea8Slogwang 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)'
1639*a9643ea8Slogwang excludes Darwin.
1640*a9643ea8Slogwang .cfi directives in Darwin assembly appear to be completely
1641*a9643ea8Slogwang different and I haven't investigated how they work.
1642*a9643ea8Slogwang
1643*a9643ea8Slogwang For even more entertainment value, note we have to use the
1644*a9643ea8Slogwang completely undocumented __builtin_dwarf_cfa(), which appears to
1645*a9643ea8Slogwang really compute the CFA, whereas __builtin_frame_address(0) claims
1646*a9643ea8Slogwang to but actually doesn't. See
1647*a9643ea8Slogwang https://bugs.kde.org/show_bug.cgi?id=243270#c47
1648*a9643ea8Slogwang */
1649*a9643ea8Slogwang #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
1650*a9643ea8Slogwang # define __FRAME_POINTER \
1651*a9643ea8Slogwang ,"r"(__builtin_dwarf_cfa())
1652*a9643ea8Slogwang # define VALGRIND_CFI_PROLOGUE \
1653*a9643ea8Slogwang "movq %%rbp, %%r15\n\t" \
1654*a9643ea8Slogwang "movq %2, %%rbp\n\t" \
1655*a9643ea8Slogwang ".cfi_remember_state\n\t" \
1656*a9643ea8Slogwang ".cfi_def_cfa rbp, 0\n\t"
1657*a9643ea8Slogwang # define VALGRIND_CFI_EPILOGUE \
1658*a9643ea8Slogwang "movq %%r15, %%rbp\n\t" \
1659*a9643ea8Slogwang ".cfi_restore_state\n\t"
1660*a9643ea8Slogwang #else
1661*a9643ea8Slogwang # define __FRAME_POINTER
1662*a9643ea8Slogwang # define VALGRIND_CFI_PROLOGUE
1663*a9643ea8Slogwang # define VALGRIND_CFI_EPILOGUE
1664*a9643ea8Slogwang #endif
1665*a9643ea8Slogwang
1666*a9643ea8Slogwang /* Macros to save and align the stack before making a function
1667*a9643ea8Slogwang call and restore it afterwards as gcc may not keep the stack
1668*a9643ea8Slogwang pointer aligned if it doesn't realise calls are being made
1669*a9643ea8Slogwang to other functions. */
1670*a9643ea8Slogwang
1671*a9643ea8Slogwang #define VALGRIND_ALIGN_STACK \
1672*a9643ea8Slogwang "movq %%rsp,%%r14\n\t" \
1673*a9643ea8Slogwang "andq $0xfffffffffffffff0,%%rsp\n\t"
1674*a9643ea8Slogwang #define VALGRIND_RESTORE_STACK \
1675*a9643ea8Slogwang "movq %%r14,%%rsp\n\t"
1676*a9643ea8Slogwang
1677*a9643ea8Slogwang /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1678*a9643ea8Slogwang long) == 8. */
1679*a9643ea8Slogwang
1680*a9643ea8Slogwang /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1681*a9643ea8Slogwang macros. In order not to trash the stack redzone, we need to drop
1682*a9643ea8Slogwang %rsp by 128 before the hidden call, and restore afterwards. The
1683*a9643ea8Slogwang nastiness is that it is only by luck that the stack still appears
1684*a9643ea8Slogwang to be unwindable during the hidden call - since then the behaviour
1685*a9643ea8Slogwang of any routine using this macro does not match what the CFI data
1686*a9643ea8Slogwang says. Sigh.
1687*a9643ea8Slogwang
1688*a9643ea8Slogwang Why is this important? Imagine that a wrapper has a
1689*a9643ea8Slogwang stack-allocated local and passes a pointer to it to the hidden call.
1690*a9643ea8Slogwang Because gcc does not know about the hidden call, it may allocate
1691*a9643ea8Slogwang that local in the redzone. Unfortunately the hidden call may then
1692*a9643ea8Slogwang trash it before it comes to use it. So we must step clear of the
1693*a9643ea8Slogwang redzone, for the duration of the hidden call, to make it safe.
1694*a9643ea8Slogwang
1695*a9643ea8Slogwang Probably the same problem afflicts the other redzone-style ABIs too
1696*a9643ea8Slogwang (ppc64-linux); but for those, the stack is
1697*a9643ea8Slogwang self-describing (none of this CFI nonsense), so at least messing
1698*a9643ea8Slogwang with the stack pointer doesn't risk leaving the stack
1699*a9643ea8Slogwang non-unwindable. */
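
/* For illustration only (hypothetical names), the hazard looks like
   this:

      int I_WRAP_SONAME_FNNAME_ZU(libfooZdsoZd0, baz) ( int* out )
      {
         int    local;     // gcc may place this in the redzone
         int    result;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);
         CALL_FN_W_W(result, fn, &local);  // pass &local to the original baz
         *out = local;
         return result;
      }

   Because gcc cannot see the hidden call, it may put 'local' in the
   128-byte redzone below %rsp, where the hidden call would trash it;
   dropping %rsp by 128 first steps clear of that. */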
1700*a9643ea8Slogwang
1701*a9643ea8Slogwang #define CALL_FN_W_v(lval, orig) \
1702*a9643ea8Slogwang do { \
1703*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1704*a9643ea8Slogwang volatile unsigned long _argvec[1]; \
1705*a9643ea8Slogwang volatile unsigned long _res; \
1706*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1707*a9643ea8Slogwang __asm__ volatile( \
1708*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
1709*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1710*a9643ea8Slogwang "subq $128,%%rsp\n\t" \
1711*a9643ea8Slogwang "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1712*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_RAX \
1713*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1714*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
1715*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1716*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1717*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1718*a9643ea8Slogwang ); \
1719*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1720*a9643ea8Slogwang } while (0)
1721*a9643ea8Slogwang
1722*a9643ea8Slogwang #define CALL_FN_W_W(lval, orig, arg1) \
1723*a9643ea8Slogwang do { \
1724*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1725*a9643ea8Slogwang volatile unsigned long _argvec[2]; \
1726*a9643ea8Slogwang volatile unsigned long _res; \
1727*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1728*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1729*a9643ea8Slogwang __asm__ volatile( \
1730*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
1731*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1732*a9643ea8Slogwang "subq $128,%%rsp\n\t" \
1733*a9643ea8Slogwang "movq 8(%%rax), %%rdi\n\t" \
1734*a9643ea8Slogwang "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1735*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_RAX \
1736*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1737*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
1738*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1739*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1740*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1741*a9643ea8Slogwang ); \
1742*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1743*a9643ea8Slogwang } while (0)
1744*a9643ea8Slogwang
1745*a9643ea8Slogwang #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1746*a9643ea8Slogwang do { \
1747*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1748*a9643ea8Slogwang volatile unsigned long _argvec[3]; \
1749*a9643ea8Slogwang volatile unsigned long _res; \
1750*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1751*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1752*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1753*a9643ea8Slogwang __asm__ volatile( \
1754*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
1755*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1756*a9643ea8Slogwang "subq $128,%%rsp\n\t" \
1757*a9643ea8Slogwang "movq 16(%%rax), %%rsi\n\t" \
1758*a9643ea8Slogwang "movq 8(%%rax), %%rdi\n\t" \
1759*a9643ea8Slogwang "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1760*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_RAX \
1761*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1762*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
1763*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1764*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1765*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1766*a9643ea8Slogwang ); \
1767*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1768*a9643ea8Slogwang } while (0)
1769*a9643ea8Slogwang
1770*a9643ea8Slogwang #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1771*a9643ea8Slogwang do { \
1772*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1773*a9643ea8Slogwang volatile unsigned long _argvec[4]; \
1774*a9643ea8Slogwang volatile unsigned long _res; \
1775*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1776*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1777*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1778*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
1779*a9643ea8Slogwang __asm__ volatile( \
1780*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
1781*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1782*a9643ea8Slogwang "subq $128,%%rsp\n\t" \
1783*a9643ea8Slogwang "movq 24(%%rax), %%rdx\n\t" \
1784*a9643ea8Slogwang "movq 16(%%rax), %%rsi\n\t" \
1785*a9643ea8Slogwang "movq 8(%%rax), %%rdi\n\t" \
1786*a9643ea8Slogwang "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1787*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_RAX \
1788*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1789*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
1790*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1791*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1792*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1793*a9643ea8Slogwang ); \
1794*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1795*a9643ea8Slogwang } while (0)
1796*a9643ea8Slogwang
1797*a9643ea8Slogwang #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1798*a9643ea8Slogwang do { \
1799*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1800*a9643ea8Slogwang volatile unsigned long _argvec[5]; \
1801*a9643ea8Slogwang volatile unsigned long _res; \
1802*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1803*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1804*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1805*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
1806*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
1807*a9643ea8Slogwang __asm__ volatile( \
1808*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
1809*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1810*a9643ea8Slogwang "subq $128,%%rsp\n\t" \
1811*a9643ea8Slogwang "movq 32(%%rax), %%rcx\n\t" \
1812*a9643ea8Slogwang "movq 24(%%rax), %%rdx\n\t" \
1813*a9643ea8Slogwang "movq 16(%%rax), %%rsi\n\t" \
1814*a9643ea8Slogwang "movq 8(%%rax), %%rdi\n\t" \
1815*a9643ea8Slogwang "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1816*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_RAX \
1817*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1818*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
1819*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1820*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1821*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1822*a9643ea8Slogwang ); \
1823*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1824*a9643ea8Slogwang } while (0)
1825*a9643ea8Slogwang
1826*a9643ea8Slogwang #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1827*a9643ea8Slogwang do { \
1828*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1829*a9643ea8Slogwang volatile unsigned long _argvec[6]; \
1830*a9643ea8Slogwang volatile unsigned long _res; \
1831*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1832*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1833*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1834*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
1835*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
1836*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
1837*a9643ea8Slogwang __asm__ volatile( \
1838*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
1839*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1840*a9643ea8Slogwang "subq $128,%%rsp\n\t" \
1841*a9643ea8Slogwang "movq 40(%%rax), %%r8\n\t" \
1842*a9643ea8Slogwang "movq 32(%%rax), %%rcx\n\t" \
1843*a9643ea8Slogwang "movq 24(%%rax), %%rdx\n\t" \
1844*a9643ea8Slogwang "movq 16(%%rax), %%rsi\n\t" \
1845*a9643ea8Slogwang "movq 8(%%rax), %%rdi\n\t" \
1846*a9643ea8Slogwang "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1847*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_RAX \
1848*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1849*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
1850*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1851*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1852*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1853*a9643ea8Slogwang ); \
1854*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1855*a9643ea8Slogwang } while (0)
1856*a9643ea8Slogwang
1857*a9643ea8Slogwang #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1858*a9643ea8Slogwang do { \
1859*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1860*a9643ea8Slogwang volatile unsigned long _argvec[7]; \
1861*a9643ea8Slogwang volatile unsigned long _res; \
1862*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1863*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1864*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1865*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
1866*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
1867*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
1868*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
1869*a9643ea8Slogwang __asm__ volatile( \
1870*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
1871*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1872*a9643ea8Slogwang "subq $128,%%rsp\n\t" \
1873*a9643ea8Slogwang "movq 48(%%rax), %%r9\n\t" \
1874*a9643ea8Slogwang "movq 40(%%rax), %%r8\n\t" \
1875*a9643ea8Slogwang "movq 32(%%rax), %%rcx\n\t" \
1876*a9643ea8Slogwang "movq 24(%%rax), %%rdx\n\t" \
1877*a9643ea8Slogwang "movq 16(%%rax), %%rsi\n\t" \
1878*a9643ea8Slogwang "movq 8(%%rax), %%rdi\n\t" \
1879*a9643ea8Slogwang "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1880*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_RAX \
1881*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1882*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
1883*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1884*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1885*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1886*a9643ea8Slogwang ); \
1887*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1888*a9643ea8Slogwang } while (0)
1889*a9643ea8Slogwang
1890*a9643ea8Slogwang #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1891*a9643ea8Slogwang arg7) \
1892*a9643ea8Slogwang do { \
1893*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1894*a9643ea8Slogwang volatile unsigned long _argvec[8]; \
1895*a9643ea8Slogwang volatile unsigned long _res; \
1896*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1897*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1898*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1899*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
1900*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
1901*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
1902*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
1903*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
1904*a9643ea8Slogwang __asm__ volatile( \
1905*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
1906*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1907*a9643ea8Slogwang "subq $136,%%rsp\n\t" \
1908*a9643ea8Slogwang "pushq 56(%%rax)\n\t" \
1909*a9643ea8Slogwang "movq 48(%%rax), %%r9\n\t" \
1910*a9643ea8Slogwang "movq 40(%%rax), %%r8\n\t" \
1911*a9643ea8Slogwang "movq 32(%%rax), %%rcx\n\t" \
1912*a9643ea8Slogwang "movq 24(%%rax), %%rdx\n\t" \
1913*a9643ea8Slogwang "movq 16(%%rax), %%rsi\n\t" \
1914*a9643ea8Slogwang "movq 8(%%rax), %%rdi\n\t" \
1915*a9643ea8Slogwang "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1916*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_RAX \
1917*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1918*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
1919*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1920*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1921*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1922*a9643ea8Slogwang ); \
1923*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1924*a9643ea8Slogwang } while (0)
1925*a9643ea8Slogwang
1926*a9643ea8Slogwang #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1927*a9643ea8Slogwang arg7,arg8) \
1928*a9643ea8Slogwang do { \
1929*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1930*a9643ea8Slogwang volatile unsigned long _argvec[9]; \
1931*a9643ea8Slogwang volatile unsigned long _res; \
1932*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1933*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1934*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1935*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
1936*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
1937*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
1938*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
1939*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
1940*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
1941*a9643ea8Slogwang __asm__ volatile( \
1942*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
1943*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1944*a9643ea8Slogwang "subq $128,%%rsp\n\t" \
1945*a9643ea8Slogwang "pushq 64(%%rax)\n\t" \
1946*a9643ea8Slogwang "pushq 56(%%rax)\n\t" \
1947*a9643ea8Slogwang "movq 48(%%rax), %%r9\n\t" \
1948*a9643ea8Slogwang "movq 40(%%rax), %%r8\n\t" \
1949*a9643ea8Slogwang "movq 32(%%rax), %%rcx\n\t" \
1950*a9643ea8Slogwang "movq 24(%%rax), %%rdx\n\t" \
1951*a9643ea8Slogwang "movq 16(%%rax), %%rsi\n\t" \
1952*a9643ea8Slogwang "movq 8(%%rax), %%rdi\n\t" \
1953*a9643ea8Slogwang "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1954*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_RAX \
1955*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1956*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
1957*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1958*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1959*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1960*a9643ea8Slogwang ); \
1961*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
1962*a9643ea8Slogwang } while (0)
1963*a9643ea8Slogwang
1964*a9643ea8Slogwang #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1965*a9643ea8Slogwang arg7,arg8,arg9) \
1966*a9643ea8Slogwang do { \
1967*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
1968*a9643ea8Slogwang volatile unsigned long _argvec[10]; \
1969*a9643ea8Slogwang volatile unsigned long _res; \
1970*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
1971*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
1972*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
1973*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
1974*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
1975*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
1976*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
1977*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
1978*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
1979*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
1980*a9643ea8Slogwang __asm__ volatile( \
1981*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
1982*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
1983*a9643ea8Slogwang "subq $136,%%rsp\n\t" \
1984*a9643ea8Slogwang "pushq 72(%%rax)\n\t" \
1985*a9643ea8Slogwang "pushq 64(%%rax)\n\t" \
1986*a9643ea8Slogwang "pushq 56(%%rax)\n\t" \
1987*a9643ea8Slogwang "movq 48(%%rax), %%r9\n\t" \
1988*a9643ea8Slogwang "movq 40(%%rax), %%r8\n\t" \
1989*a9643ea8Slogwang "movq 32(%%rax), %%rcx\n\t" \
1990*a9643ea8Slogwang "movq 24(%%rax), %%rdx\n\t" \
1991*a9643ea8Slogwang "movq 16(%%rax), %%rsi\n\t" \
1992*a9643ea8Slogwang "movq 8(%%rax), %%rdi\n\t" \
1993*a9643ea8Slogwang "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1994*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_RAX \
1995*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
1996*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
1997*a9643ea8Slogwang : /*out*/ "=a" (_res) \
1998*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1999*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2000*a9643ea8Slogwang ); \
2001*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2002*a9643ea8Slogwang } while (0)
2003*a9643ea8Slogwang
2004*a9643ea8Slogwang #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2005*a9643ea8Slogwang arg7,arg8,arg9,arg10) \
2006*a9643ea8Slogwang do { \
2007*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2008*a9643ea8Slogwang volatile unsigned long _argvec[11]; \
2009*a9643ea8Slogwang volatile unsigned long _res; \
2010*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
2011*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
2012*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
2013*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
2014*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
2015*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
2016*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
2017*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
2018*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
2019*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
2020*a9643ea8Slogwang _argvec[10] = (unsigned long)(arg10); \
2021*a9643ea8Slogwang __asm__ volatile( \
2022*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
2023*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2024*a9643ea8Slogwang "subq $128,%%rsp\n\t" \
2025*a9643ea8Slogwang "pushq 80(%%rax)\n\t" \
2026*a9643ea8Slogwang "pushq 72(%%rax)\n\t" \
2027*a9643ea8Slogwang "pushq 64(%%rax)\n\t" \
2028*a9643ea8Slogwang "pushq 56(%%rax)\n\t" \
2029*a9643ea8Slogwang "movq 48(%%rax), %%r9\n\t" \
2030*a9643ea8Slogwang "movq 40(%%rax), %%r8\n\t" \
2031*a9643ea8Slogwang "movq 32(%%rax), %%rcx\n\t" \
2032*a9643ea8Slogwang "movq 24(%%rax), %%rdx\n\t" \
2033*a9643ea8Slogwang "movq 16(%%rax), %%rsi\n\t" \
2034*a9643ea8Slogwang "movq 8(%%rax), %%rdi\n\t" \
2035*a9643ea8Slogwang "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2036*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_RAX \
2037*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2038*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
2039*a9643ea8Slogwang : /*out*/ "=a" (_res) \
2040*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2041*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2042*a9643ea8Slogwang ); \
2043*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2044*a9643ea8Slogwang } while (0)
2045*a9643ea8Slogwang
2046*a9643ea8Slogwang #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2047*a9643ea8Slogwang arg7,arg8,arg9,arg10,arg11) \
2048*a9643ea8Slogwang do { \
2049*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2050*a9643ea8Slogwang volatile unsigned long _argvec[12]; \
2051*a9643ea8Slogwang volatile unsigned long _res; \
2052*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
2053*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
2054*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
2055*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
2056*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
2057*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
2058*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
2059*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
2060*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
2061*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
2062*a9643ea8Slogwang _argvec[10] = (unsigned long)(arg10); \
2063*a9643ea8Slogwang _argvec[11] = (unsigned long)(arg11); \
2064*a9643ea8Slogwang __asm__ volatile( \
2065*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
2066*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2067*a9643ea8Slogwang "subq $136,%%rsp\n\t" \
2068*a9643ea8Slogwang "pushq 88(%%rax)\n\t" \
2069*a9643ea8Slogwang "pushq 80(%%rax)\n\t" \
2070*a9643ea8Slogwang "pushq 72(%%rax)\n\t" \
2071*a9643ea8Slogwang "pushq 64(%%rax)\n\t" \
2072*a9643ea8Slogwang "pushq 56(%%rax)\n\t" \
2073*a9643ea8Slogwang "movq 48(%%rax), %%r9\n\t" \
2074*a9643ea8Slogwang "movq 40(%%rax), %%r8\n\t" \
2075*a9643ea8Slogwang "movq 32(%%rax), %%rcx\n\t" \
2076*a9643ea8Slogwang "movq 24(%%rax), %%rdx\n\t" \
2077*a9643ea8Slogwang "movq 16(%%rax), %%rsi\n\t" \
2078*a9643ea8Slogwang "movq 8(%%rax), %%rdi\n\t" \
2079*a9643ea8Slogwang "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2080*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_RAX \
2081*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2082*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
2083*a9643ea8Slogwang : /*out*/ "=a" (_res) \
2084*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2085*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2086*a9643ea8Slogwang ); \
2087*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2088*a9643ea8Slogwang } while (0)
2089*a9643ea8Slogwang
2090*a9643ea8Slogwang #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2091*a9643ea8Slogwang arg7,arg8,arg9,arg10,arg11,arg12) \
2092*a9643ea8Slogwang do { \
2093*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2094*a9643ea8Slogwang volatile unsigned long _argvec[13]; \
2095*a9643ea8Slogwang volatile unsigned long _res; \
2096*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
2097*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
2098*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
2099*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
2100*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
2101*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
2102*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
2103*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
2104*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
2105*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
2106*a9643ea8Slogwang _argvec[10] = (unsigned long)(arg10); \
2107*a9643ea8Slogwang _argvec[11] = (unsigned long)(arg11); \
2108*a9643ea8Slogwang _argvec[12] = (unsigned long)(arg12); \
2109*a9643ea8Slogwang __asm__ volatile( \
2110*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
2111*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2112*a9643ea8Slogwang "subq $128,%%rsp\n\t" \
2113*a9643ea8Slogwang "pushq 96(%%rax)\n\t" \
2114*a9643ea8Slogwang "pushq 88(%%rax)\n\t" \
2115*a9643ea8Slogwang "pushq 80(%%rax)\n\t" \
2116*a9643ea8Slogwang "pushq 72(%%rax)\n\t" \
2117*a9643ea8Slogwang "pushq 64(%%rax)\n\t" \
2118*a9643ea8Slogwang "pushq 56(%%rax)\n\t" \
2119*a9643ea8Slogwang "movq 48(%%rax), %%r9\n\t" \
2120*a9643ea8Slogwang "movq 40(%%rax), %%r8\n\t" \
2121*a9643ea8Slogwang "movq 32(%%rax), %%rcx\n\t" \
2122*a9643ea8Slogwang "movq 24(%%rax), %%rdx\n\t" \
2123*a9643ea8Slogwang "movq 16(%%rax), %%rsi\n\t" \
2124*a9643ea8Slogwang "movq 8(%%rax), %%rdi\n\t" \
2125*a9643ea8Slogwang "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2126*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_RAX \
2127*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2128*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
2129*a9643ea8Slogwang : /*out*/ "=a" (_res) \
2130*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2131*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2132*a9643ea8Slogwang ); \
2133*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2134*a9643ea8Slogwang } while (0)
2135*a9643ea8Slogwang
2136*a9643ea8Slogwang #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
2137*a9643ea8Slogwang
2138*a9643ea8Slogwang /* ------------------------ ppc32-linux ------------------------ */
2139*a9643ea8Slogwang
2140*a9643ea8Slogwang #if defined(PLAT_ppc32_linux)
2141*a9643ea8Slogwang
2142*a9643ea8Slogwang /* This is useful for finding out about the on-stack stuff:
2143*a9643ea8Slogwang
2144*a9643ea8Slogwang extern int f9 ( int,int,int,int,int,int,int,int,int );
2145*a9643ea8Slogwang extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2146*a9643ea8Slogwang extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2147*a9643ea8Slogwang extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2148*a9643ea8Slogwang
2149*a9643ea8Slogwang int g9 ( void ) {
2150*a9643ea8Slogwang return f9(11,22,33,44,55,66,77,88,99);
2151*a9643ea8Slogwang }
2152*a9643ea8Slogwang int g10 ( void ) {
2153*a9643ea8Slogwang return f10(11,22,33,44,55,66,77,88,99,110);
2154*a9643ea8Slogwang }
2155*a9643ea8Slogwang int g11 ( void ) {
2156*a9643ea8Slogwang return f11(11,22,33,44,55,66,77,88,99,110,121);
2157*a9643ea8Slogwang }
2158*a9643ea8Slogwang int g12 ( void ) {
2159*a9643ea8Slogwang return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2160*a9643ea8Slogwang }
2161*a9643ea8Slogwang */
2162*a9643ea8Slogwang
2163*a9643ea8Slogwang /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2164*a9643ea8Slogwang
2165*a9643ea8Slogwang /* These regs are trashed by the hidden call. */
2166*a9643ea8Slogwang #define __CALLER_SAVED_REGS \
2167*a9643ea8Slogwang "lr", "ctr", "xer", \
2168*a9643ea8Slogwang "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2169*a9643ea8Slogwang "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2170*a9643ea8Slogwang "r11", "r12", "r13"
2171*a9643ea8Slogwang
2172*a9643ea8Slogwang /* Macros to save and align the stack before making a function
2173*a9643ea8Slogwang call and restore it afterwards as gcc may not keep the stack
2174*a9643ea8Slogwang pointer aligned if it doesn't realise calls are being made
2175*a9643ea8Slogwang to other functions. */
2176*a9643ea8Slogwang
2177*a9643ea8Slogwang #define VALGRIND_ALIGN_STACK \
2178*a9643ea8Slogwang "mr 28,1\n\t" \
2179*a9643ea8Slogwang "rlwinm 1,1,0,0,27\n\t"
2180*a9643ea8Slogwang #define VALGRIND_RESTORE_STACK \
2181*a9643ea8Slogwang "mr 1,28\n\t"
2182*a9643ea8Slogwang
2183*a9643ea8Slogwang /* These CALL_FN_ macros assume that on ppc32-linux,
2184*a9643ea8Slogwang sizeof(unsigned long) == 4. */
2185*a9643ea8Slogwang
2186*a9643ea8Slogwang #define CALL_FN_W_v(lval, orig) \
2187*a9643ea8Slogwang do { \
2188*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2189*a9643ea8Slogwang volatile unsigned long _argvec[1]; \
2190*a9643ea8Slogwang volatile unsigned long _res; \
2191*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
2192*a9643ea8Slogwang __asm__ volatile( \
2193*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2194*a9643ea8Slogwang "mr 11,%1\n\t" \
2195*a9643ea8Slogwang "lwz 11,0(11)\n\t" /* target->r11 */ \
2196*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2197*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2198*a9643ea8Slogwang "mr %0,3" \
2199*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2200*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
2201*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2202*a9643ea8Slogwang ); \
2203*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2204*a9643ea8Slogwang } while (0)
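
/* A minimal usage sketch (illustration only): together with the
   function-wrapping macros provided elsewhere in this header
   (I_WRAP_SONAME_FNNAME_ZU, VALGRIND_GET_ORIG_FN), a wrapper for
   "int foo(int)" in an object with an empty soname might look
   like:

      int I_WRAP_SONAME_FNNAME_ZU(NONE, foo) ( int x )
      {
         int    r;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);
         CALL_FN_W_W(r, fn, x);
         return r;
      }

   CALL_FN_W_v follows the same pattern for functions that take no
   arguments, and the wider variants below extend it to more. */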
2205*a9643ea8Slogwang
2206*a9643ea8Slogwang #define CALL_FN_W_W(lval, orig, arg1) \
2207*a9643ea8Slogwang do { \
2208*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2209*a9643ea8Slogwang volatile unsigned long _argvec[2]; \
2210*a9643ea8Slogwang volatile unsigned long _res; \
2211*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
2212*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
2213*a9643ea8Slogwang __asm__ volatile( \
2214*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2215*a9643ea8Slogwang "mr 11,%1\n\t" \
2216*a9643ea8Slogwang "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2217*a9643ea8Slogwang "lwz 11,0(11)\n\t" /* target->r11 */ \
2218*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2219*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2220*a9643ea8Slogwang "mr %0,3" \
2221*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2222*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
2223*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2224*a9643ea8Slogwang ); \
2225*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2226*a9643ea8Slogwang } while (0)
2227*a9643ea8Slogwang
2228*a9643ea8Slogwang #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2229*a9643ea8Slogwang do { \
2230*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2231*a9643ea8Slogwang volatile unsigned long _argvec[3]; \
2232*a9643ea8Slogwang volatile unsigned long _res; \
2233*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
2234*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
2235*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
2236*a9643ea8Slogwang __asm__ volatile( \
2237*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2238*a9643ea8Slogwang "mr 11,%1\n\t" \
2239*a9643ea8Slogwang "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2240*a9643ea8Slogwang "lwz 4,8(11)\n\t" \
2241*a9643ea8Slogwang "lwz 11,0(11)\n\t" /* target->r11 */ \
2242*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2243*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2244*a9643ea8Slogwang "mr %0,3" \
2245*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2246*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
2247*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2248*a9643ea8Slogwang ); \
2249*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2250*a9643ea8Slogwang } while (0)
2251*a9643ea8Slogwang
2252*a9643ea8Slogwang #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2253*a9643ea8Slogwang do { \
2254*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2255*a9643ea8Slogwang volatile unsigned long _argvec[4]; \
2256*a9643ea8Slogwang volatile unsigned long _res; \
2257*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
2258*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
2259*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
2260*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
2261*a9643ea8Slogwang __asm__ volatile( \
2262*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2263*a9643ea8Slogwang "mr 11,%1\n\t" \
2264*a9643ea8Slogwang "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2265*a9643ea8Slogwang "lwz 4,8(11)\n\t" \
2266*a9643ea8Slogwang "lwz 5,12(11)\n\t" \
2267*a9643ea8Slogwang "lwz 11,0(11)\n\t" /* target->r11 */ \
2268*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2269*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2270*a9643ea8Slogwang "mr %0,3" \
2271*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2272*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
2273*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2274*a9643ea8Slogwang ); \
2275*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2276*a9643ea8Slogwang } while (0)
2277*a9643ea8Slogwang
2278*a9643ea8Slogwang #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2279*a9643ea8Slogwang do { \
2280*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2281*a9643ea8Slogwang volatile unsigned long _argvec[5]; \
2282*a9643ea8Slogwang volatile unsigned long _res; \
2283*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
2284*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
2285*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
2286*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
2287*a9643ea8Slogwang _argvec[4] = (unsigned long)arg4; \
2288*a9643ea8Slogwang __asm__ volatile( \
2289*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2290*a9643ea8Slogwang "mr 11,%1\n\t" \
2291*a9643ea8Slogwang "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2292*a9643ea8Slogwang "lwz 4,8(11)\n\t" \
2293*a9643ea8Slogwang "lwz 5,12(11)\n\t" \
2294*a9643ea8Slogwang "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2295*a9643ea8Slogwang "lwz 11,0(11)\n\t" /* target->r11 */ \
2296*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2297*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2298*a9643ea8Slogwang "mr %0,3" \
2299*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2300*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
2301*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2302*a9643ea8Slogwang ); \
2303*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2304*a9643ea8Slogwang } while (0)
2305*a9643ea8Slogwang
2306*a9643ea8Slogwang #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2307*a9643ea8Slogwang do { \
2308*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2309*a9643ea8Slogwang volatile unsigned long _argvec[6]; \
2310*a9643ea8Slogwang volatile unsigned long _res; \
2311*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
2312*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
2313*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
2314*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
2315*a9643ea8Slogwang _argvec[4] = (unsigned long)arg4; \
2316*a9643ea8Slogwang _argvec[5] = (unsigned long)arg5; \
2317*a9643ea8Slogwang __asm__ volatile( \
2318*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2319*a9643ea8Slogwang "mr 11,%1\n\t" \
2320*a9643ea8Slogwang "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2321*a9643ea8Slogwang "lwz 4,8(11)\n\t" \
2322*a9643ea8Slogwang "lwz 5,12(11)\n\t" \
2323*a9643ea8Slogwang "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2324*a9643ea8Slogwang "lwz 7,20(11)\n\t" \
2325*a9643ea8Slogwang "lwz 11,0(11)\n\t" /* target->r11 */ \
2326*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2327*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2328*a9643ea8Slogwang "mr %0,3" \
2329*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2330*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
2331*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2332*a9643ea8Slogwang ); \
2333*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2334*a9643ea8Slogwang } while (0)
2335*a9643ea8Slogwang
2336*a9643ea8Slogwang #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2337*a9643ea8Slogwang do { \
2338*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2339*a9643ea8Slogwang volatile unsigned long _argvec[7]; \
2340*a9643ea8Slogwang volatile unsigned long _res; \
2341*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
2342*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
2343*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
2344*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
2345*a9643ea8Slogwang _argvec[4] = (unsigned long)arg4; \
2346*a9643ea8Slogwang _argvec[5] = (unsigned long)arg5; \
2347*a9643ea8Slogwang _argvec[6] = (unsigned long)arg6; \
2348*a9643ea8Slogwang __asm__ volatile( \
2349*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2350*a9643ea8Slogwang "mr 11,%1\n\t" \
2351*a9643ea8Slogwang "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2352*a9643ea8Slogwang "lwz 4,8(11)\n\t" \
2353*a9643ea8Slogwang "lwz 5,12(11)\n\t" \
2354*a9643ea8Slogwang "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2355*a9643ea8Slogwang "lwz 7,20(11)\n\t" \
2356*a9643ea8Slogwang "lwz 8,24(11)\n\t" \
2357*a9643ea8Slogwang "lwz 11,0(11)\n\t" /* target->r11 */ \
2358*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2359*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2360*a9643ea8Slogwang "mr %0,3" \
2361*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2362*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
2363*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2364*a9643ea8Slogwang ); \
2365*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2366*a9643ea8Slogwang } while (0)
2367*a9643ea8Slogwang
2368*a9643ea8Slogwang #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2369*a9643ea8Slogwang arg7) \
2370*a9643ea8Slogwang do { \
2371*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2372*a9643ea8Slogwang volatile unsigned long _argvec[8]; \
2373*a9643ea8Slogwang volatile unsigned long _res; \
2374*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
2375*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
2376*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
2377*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
2378*a9643ea8Slogwang _argvec[4] = (unsigned long)arg4; \
2379*a9643ea8Slogwang _argvec[5] = (unsigned long)arg5; \
2380*a9643ea8Slogwang _argvec[6] = (unsigned long)arg6; \
2381*a9643ea8Slogwang _argvec[7] = (unsigned long)arg7; \
2382*a9643ea8Slogwang __asm__ volatile( \
2383*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2384*a9643ea8Slogwang "mr 11,%1\n\t" \
2385*a9643ea8Slogwang "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2386*a9643ea8Slogwang "lwz 4,8(11)\n\t" \
2387*a9643ea8Slogwang "lwz 5,12(11)\n\t" \
2388*a9643ea8Slogwang "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2389*a9643ea8Slogwang "lwz 7,20(11)\n\t" \
2390*a9643ea8Slogwang "lwz 8,24(11)\n\t" \
2391*a9643ea8Slogwang "lwz 9,28(11)\n\t" \
2392*a9643ea8Slogwang "lwz 11,0(11)\n\t" /* target->r11 */ \
2393*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2394*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2395*a9643ea8Slogwang "mr %0,3" \
2396*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2397*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
2398*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2399*a9643ea8Slogwang ); \
2400*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2401*a9643ea8Slogwang } while (0)
2402*a9643ea8Slogwang
2403*a9643ea8Slogwang #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2404*a9643ea8Slogwang arg7,arg8) \
2405*a9643ea8Slogwang do { \
2406*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2407*a9643ea8Slogwang volatile unsigned long _argvec[9]; \
2408*a9643ea8Slogwang volatile unsigned long _res; \
2409*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
2410*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
2411*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
2412*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
2413*a9643ea8Slogwang _argvec[4] = (unsigned long)arg4; \
2414*a9643ea8Slogwang _argvec[5] = (unsigned long)arg5; \
2415*a9643ea8Slogwang _argvec[6] = (unsigned long)arg6; \
2416*a9643ea8Slogwang _argvec[7] = (unsigned long)arg7; \
2417*a9643ea8Slogwang _argvec[8] = (unsigned long)arg8; \
2418*a9643ea8Slogwang __asm__ volatile( \
2419*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2420*a9643ea8Slogwang "mr 11,%1\n\t" \
2421*a9643ea8Slogwang "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2422*a9643ea8Slogwang "lwz 4,8(11)\n\t" \
2423*a9643ea8Slogwang "lwz 5,12(11)\n\t" \
2424*a9643ea8Slogwang "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2425*a9643ea8Slogwang "lwz 7,20(11)\n\t" \
2426*a9643ea8Slogwang "lwz 8,24(11)\n\t" \
2427*a9643ea8Slogwang "lwz 9,28(11)\n\t" \
2428*a9643ea8Slogwang "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2429*a9643ea8Slogwang "lwz 11,0(11)\n\t" /* target->r11 */ \
2430*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2431*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2432*a9643ea8Slogwang "mr %0,3" \
2433*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2434*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
2435*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2436*a9643ea8Slogwang ); \
2437*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2438*a9643ea8Slogwang } while (0)
2439*a9643ea8Slogwang
2440*a9643ea8Slogwang #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2441*a9643ea8Slogwang arg7,arg8,arg9) \
2442*a9643ea8Slogwang do { \
2443*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2444*a9643ea8Slogwang volatile unsigned long _argvec[10]; \
2445*a9643ea8Slogwang volatile unsigned long _res; \
2446*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
2447*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
2448*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
2449*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
2450*a9643ea8Slogwang _argvec[4] = (unsigned long)arg4; \
2451*a9643ea8Slogwang _argvec[5] = (unsigned long)arg5; \
2452*a9643ea8Slogwang _argvec[6] = (unsigned long)arg6; \
2453*a9643ea8Slogwang _argvec[7] = (unsigned long)arg7; \
2454*a9643ea8Slogwang _argvec[8] = (unsigned long)arg8; \
2455*a9643ea8Slogwang _argvec[9] = (unsigned long)arg9; \
2456*a9643ea8Slogwang __asm__ volatile( \
2457*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2458*a9643ea8Slogwang "mr 11,%1\n\t" \
2459*a9643ea8Slogwang "addi 1,1,-16\n\t" \
2460*a9643ea8Slogwang /* arg9 */ \
2461*a9643ea8Slogwang "lwz 3,36(11)\n\t" \
2462*a9643ea8Slogwang "stw 3,8(1)\n\t" \
2463*a9643ea8Slogwang /* args1-8 */ \
2464*a9643ea8Slogwang "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2465*a9643ea8Slogwang "lwz 4,8(11)\n\t" \
2466*a9643ea8Slogwang "lwz 5,12(11)\n\t" \
2467*a9643ea8Slogwang "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2468*a9643ea8Slogwang "lwz 7,20(11)\n\t" \
2469*a9643ea8Slogwang "lwz 8,24(11)\n\t" \
2470*a9643ea8Slogwang "lwz 9,28(11)\n\t" \
2471*a9643ea8Slogwang "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2472*a9643ea8Slogwang "lwz 11,0(11)\n\t" /* target->r11 */ \
2473*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2474*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2475*a9643ea8Slogwang "mr %0,3" \
2476*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2477*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
2478*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2479*a9643ea8Slogwang ); \
2480*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2481*a9643ea8Slogwang } while (0)
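
/* Note on the nine-and-more argument variants: the first eight
   integer arguments travel in r3..r10, and the remainder go into
   the caller's parameter save area, which in the 32-bit SVR4 ABI
   starts 8 bytes above the stack pointer (0(1) holds the back
   chain, 4(1) the LR save word).  Hence, after the frame is grown
   with "addi 1,1,-16" (or -32 for the 11- and 12-argument cases),
   arg9 is stored at 8(1), arg10 at 12(1), and so on. */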
2482*a9643ea8Slogwang
2483*a9643ea8Slogwang #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2484*a9643ea8Slogwang arg7,arg8,arg9,arg10) \
2485*a9643ea8Slogwang do { \
2486*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2487*a9643ea8Slogwang volatile unsigned long _argvec[11]; \
2488*a9643ea8Slogwang volatile unsigned long _res; \
2489*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
2490*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
2491*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
2492*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
2493*a9643ea8Slogwang _argvec[4] = (unsigned long)arg4; \
2494*a9643ea8Slogwang _argvec[5] = (unsigned long)arg5; \
2495*a9643ea8Slogwang _argvec[6] = (unsigned long)arg6; \
2496*a9643ea8Slogwang _argvec[7] = (unsigned long)arg7; \
2497*a9643ea8Slogwang _argvec[8] = (unsigned long)arg8; \
2498*a9643ea8Slogwang _argvec[9] = (unsigned long)arg9; \
2499*a9643ea8Slogwang _argvec[10] = (unsigned long)arg10; \
2500*a9643ea8Slogwang __asm__ volatile( \
2501*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2502*a9643ea8Slogwang "mr 11,%1\n\t" \
2503*a9643ea8Slogwang "addi 1,1,-16\n\t" \
2504*a9643ea8Slogwang /* arg10 */ \
2505*a9643ea8Slogwang "lwz 3,40(11)\n\t" \
2506*a9643ea8Slogwang "stw 3,12(1)\n\t" \
2507*a9643ea8Slogwang /* arg9 */ \
2508*a9643ea8Slogwang "lwz 3,36(11)\n\t" \
2509*a9643ea8Slogwang "stw 3,8(1)\n\t" \
2510*a9643ea8Slogwang /* args1-8 */ \
2511*a9643ea8Slogwang "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2512*a9643ea8Slogwang "lwz 4,8(11)\n\t" \
2513*a9643ea8Slogwang "lwz 5,12(11)\n\t" \
2514*a9643ea8Slogwang "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2515*a9643ea8Slogwang "lwz 7,20(11)\n\t" \
2516*a9643ea8Slogwang "lwz 8,24(11)\n\t" \
2517*a9643ea8Slogwang "lwz 9,28(11)\n\t" \
2518*a9643ea8Slogwang "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2519*a9643ea8Slogwang "lwz 11,0(11)\n\t" /* target->r11 */ \
2520*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2521*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2522*a9643ea8Slogwang "mr %0,3" \
2523*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2524*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
2525*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2526*a9643ea8Slogwang ); \
2527*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2528*a9643ea8Slogwang } while (0)
2529*a9643ea8Slogwang
2530*a9643ea8Slogwang #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2531*a9643ea8Slogwang arg7,arg8,arg9,arg10,arg11) \
2532*a9643ea8Slogwang do { \
2533*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2534*a9643ea8Slogwang volatile unsigned long _argvec[12]; \
2535*a9643ea8Slogwang volatile unsigned long _res; \
2536*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
2537*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
2538*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
2539*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
2540*a9643ea8Slogwang _argvec[4] = (unsigned long)arg4; \
2541*a9643ea8Slogwang _argvec[5] = (unsigned long)arg5; \
2542*a9643ea8Slogwang _argvec[6] = (unsigned long)arg6; \
2543*a9643ea8Slogwang _argvec[7] = (unsigned long)arg7; \
2544*a9643ea8Slogwang _argvec[8] = (unsigned long)arg8; \
2545*a9643ea8Slogwang _argvec[9] = (unsigned long)arg9; \
2546*a9643ea8Slogwang _argvec[10] = (unsigned long)arg10; \
2547*a9643ea8Slogwang _argvec[11] = (unsigned long)arg11; \
2548*a9643ea8Slogwang __asm__ volatile( \
2549*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2550*a9643ea8Slogwang "mr 11,%1\n\t" \
2551*a9643ea8Slogwang "addi 1,1,-32\n\t" \
2552*a9643ea8Slogwang /* arg11 */ \
2553*a9643ea8Slogwang "lwz 3,44(11)\n\t" \
2554*a9643ea8Slogwang "stw 3,16(1)\n\t" \
2555*a9643ea8Slogwang /* arg10 */ \
2556*a9643ea8Slogwang "lwz 3,40(11)\n\t" \
2557*a9643ea8Slogwang "stw 3,12(1)\n\t" \
2558*a9643ea8Slogwang /* arg9 */ \
2559*a9643ea8Slogwang "lwz 3,36(11)\n\t" \
2560*a9643ea8Slogwang "stw 3,8(1)\n\t" \
2561*a9643ea8Slogwang /* args1-8 */ \
2562*a9643ea8Slogwang "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2563*a9643ea8Slogwang "lwz 4,8(11)\n\t" \
2564*a9643ea8Slogwang "lwz 5,12(11)\n\t" \
2565*a9643ea8Slogwang "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2566*a9643ea8Slogwang "lwz 7,20(11)\n\t" \
2567*a9643ea8Slogwang "lwz 8,24(11)\n\t" \
2568*a9643ea8Slogwang "lwz 9,28(11)\n\t" \
2569*a9643ea8Slogwang "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2570*a9643ea8Slogwang "lwz 11,0(11)\n\t" /* target->r11 */ \
2571*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2572*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2573*a9643ea8Slogwang "mr %0,3" \
2574*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2575*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
2576*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2577*a9643ea8Slogwang ); \
2578*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2579*a9643ea8Slogwang } while (0)
2580*a9643ea8Slogwang
2581*a9643ea8Slogwang #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2582*a9643ea8Slogwang arg7,arg8,arg9,arg10,arg11,arg12) \
2583*a9643ea8Slogwang do { \
2584*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2585*a9643ea8Slogwang volatile unsigned long _argvec[13]; \
2586*a9643ea8Slogwang volatile unsigned long _res; \
2587*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
2588*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
2589*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
2590*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
2591*a9643ea8Slogwang _argvec[4] = (unsigned long)arg4; \
2592*a9643ea8Slogwang _argvec[5] = (unsigned long)arg5; \
2593*a9643ea8Slogwang _argvec[6] = (unsigned long)arg6; \
2594*a9643ea8Slogwang _argvec[7] = (unsigned long)arg7; \
2595*a9643ea8Slogwang _argvec[8] = (unsigned long)arg8; \
2596*a9643ea8Slogwang _argvec[9] = (unsigned long)arg9; \
2597*a9643ea8Slogwang _argvec[10] = (unsigned long)arg10; \
2598*a9643ea8Slogwang _argvec[11] = (unsigned long)arg11; \
2599*a9643ea8Slogwang _argvec[12] = (unsigned long)arg12; \
2600*a9643ea8Slogwang __asm__ volatile( \
2601*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2602*a9643ea8Slogwang "mr 11,%1\n\t" \
2603*a9643ea8Slogwang "addi 1,1,-32\n\t" \
2604*a9643ea8Slogwang /* arg12 */ \
2605*a9643ea8Slogwang "lwz 3,48(11)\n\t" \
2606*a9643ea8Slogwang "stw 3,20(1)\n\t" \
2607*a9643ea8Slogwang /* arg11 */ \
2608*a9643ea8Slogwang "lwz 3,44(11)\n\t" \
2609*a9643ea8Slogwang "stw 3,16(1)\n\t" \
2610*a9643ea8Slogwang /* arg10 */ \
2611*a9643ea8Slogwang "lwz 3,40(11)\n\t" \
2612*a9643ea8Slogwang "stw 3,12(1)\n\t" \
2613*a9643ea8Slogwang /* arg9 */ \
2614*a9643ea8Slogwang "lwz 3,36(11)\n\t" \
2615*a9643ea8Slogwang "stw 3,8(1)\n\t" \
2616*a9643ea8Slogwang /* args1-8 */ \
2617*a9643ea8Slogwang "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2618*a9643ea8Slogwang "lwz 4,8(11)\n\t" \
2619*a9643ea8Slogwang "lwz 5,12(11)\n\t" \
2620*a9643ea8Slogwang "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2621*a9643ea8Slogwang "lwz 7,20(11)\n\t" \
2622*a9643ea8Slogwang "lwz 8,24(11)\n\t" \
2623*a9643ea8Slogwang "lwz 9,28(11)\n\t" \
2624*a9643ea8Slogwang "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2625*a9643ea8Slogwang "lwz 11,0(11)\n\t" /* target->r11 */ \
2626*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2627*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2628*a9643ea8Slogwang "mr %0,3" \
2629*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2630*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
2631*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2632*a9643ea8Slogwang ); \
2633*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2634*a9643ea8Slogwang } while (0)
2635*a9643ea8Slogwang
2636*a9643ea8Slogwang #endif /* PLAT_ppc32_linux */
2637*a9643ea8Slogwang
2638*a9643ea8Slogwang /* ----------------------- ppc64be-linux ----------------------- */
2639*a9643ea8Slogwang
2640*a9643ea8Slogwang #if defined(PLAT_ppc64be_linux)
2641*a9643ea8Slogwang
2642*a9643ea8Slogwang /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2643*a9643ea8Slogwang
2644*a9643ea8Slogwang /* These regs are trashed by the hidden call. */
2645*a9643ea8Slogwang #define __CALLER_SAVED_REGS \
2646*a9643ea8Slogwang "lr", "ctr", "xer", \
2647*a9643ea8Slogwang "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2648*a9643ea8Slogwang "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2649*a9643ea8Slogwang "r11", "r12", "r13"
2650*a9643ea8Slogwang
2651*a9643ea8Slogwang /* Macros to save and align the stack before making a function
2652*a9643ea8Slogwang call and restore it afterwards as gcc may not keep the stack
2653*a9643ea8Slogwang pointer aligned if it doesn't realise calls are being made
2654*a9643ea8Slogwang to other functions. */
2655*a9643ea8Slogwang
2656*a9643ea8Slogwang #define VALGRIND_ALIGN_STACK \
2657*a9643ea8Slogwang "mr 28,1\n\t" \
2658*a9643ea8Slogwang "rldicr 1,1,0,59\n\t"
2659*a9643ea8Slogwang #define VALGRIND_RESTORE_STACK \
2660*a9643ea8Slogwang "mr 1,28\n\t"
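
/* As in the ppc32 case above: "mr 28,1" saves the current stack
   pointer in r28, and "rldicr 1,1,0,59" keeps bits 0..59 of the
   64-bit r1, i.e. clears the low four bits and rounds the stack
   pointer down to a 16-byte boundary.  r28 is listed in the
   clobber lists below for that reason. */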
2661*a9643ea8Slogwang
2662*a9643ea8Slogwang /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2663*a9643ea8Slogwang long) == 8. */
2664*a9643ea8Slogwang
2665*a9643ea8Slogwang #define CALL_FN_W_v(lval, orig) \
2666*a9643ea8Slogwang do { \
2667*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2668*a9643ea8Slogwang volatile unsigned long _argvec[3+0]; \
2669*a9643ea8Slogwang volatile unsigned long _res; \
2670*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
2671*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
2672*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
2673*a9643ea8Slogwang __asm__ volatile( \
2674*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2675*a9643ea8Slogwang "mr 11,%1\n\t" \
2676*a9643ea8Slogwang "std 2,-16(11)\n\t" /* save tocptr */ \
2677*a9643ea8Slogwang "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2678*a9643ea8Slogwang "ld 11, 0(11)\n\t" /* target->r11 */ \
2679*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2680*a9643ea8Slogwang "mr 11,%1\n\t" \
2681*a9643ea8Slogwang "mr %0,3\n\t" \
2682*a9643ea8Slogwang "ld 2,-16(11)\n\t" /* restore tocptr */ \
2683*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2684*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2685*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
2686*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2687*a9643ea8Slogwang ); \
2688*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2689*a9643ea8Slogwang } while (0)
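
/* Layout note for these ppc64be variants: the asm block is handed
   &_argvec[2], so 0(11) holds the target nraddr, 8(11) onwards the
   arguments, -8(11) is _argvec[1] (the callee's TOC pointer), and
   -16(11) is _argvec[0], which is used only as scratch space to
   preserve the caller's r2 across the call, hence the
   "std 2,-16(11)" before the call and the matching
   "ld 2,-16(11)" afterwards. */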
2690*a9643ea8Slogwang
2691*a9643ea8Slogwang #define CALL_FN_W_W(lval, orig, arg1) \
2692*a9643ea8Slogwang do { \
2693*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2694*a9643ea8Slogwang volatile unsigned long _argvec[3+1]; \
2695*a9643ea8Slogwang volatile unsigned long _res; \
2696*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
2697*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
2698*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
2699*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
2700*a9643ea8Slogwang __asm__ volatile( \
2701*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2702*a9643ea8Slogwang "mr 11,%1\n\t" \
2703*a9643ea8Slogwang "std 2,-16(11)\n\t" /* save tocptr */ \
2704*a9643ea8Slogwang "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2705*a9643ea8Slogwang "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2706*a9643ea8Slogwang "ld 11, 0(11)\n\t" /* target->r11 */ \
2707*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2708*a9643ea8Slogwang "mr 11,%1\n\t" \
2709*a9643ea8Slogwang "mr %0,3\n\t" \
2710*a9643ea8Slogwang "ld 2,-16(11)\n\t" /* restore tocptr */ \
2711*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2712*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2713*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
2714*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2715*a9643ea8Slogwang ); \
2716*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2717*a9643ea8Slogwang } while (0)
2718*a9643ea8Slogwang
2719*a9643ea8Slogwang #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2720*a9643ea8Slogwang do { \
2721*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2722*a9643ea8Slogwang volatile unsigned long _argvec[3+2]; \
2723*a9643ea8Slogwang volatile unsigned long _res; \
2724*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
2725*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
2726*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
2727*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
2728*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
2729*a9643ea8Slogwang __asm__ volatile( \
2730*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2731*a9643ea8Slogwang "mr 11,%1\n\t" \
2732*a9643ea8Slogwang "std 2,-16(11)\n\t" /* save tocptr */ \
2733*a9643ea8Slogwang "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2734*a9643ea8Slogwang "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2735*a9643ea8Slogwang "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2736*a9643ea8Slogwang "ld 11, 0(11)\n\t" /* target->r11 */ \
2737*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2738*a9643ea8Slogwang "mr 11,%1\n\t" \
2739*a9643ea8Slogwang "mr %0,3\n\t" \
2740*a9643ea8Slogwang "ld 2,-16(11)\n\t" /* restore tocptr */ \
2741*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2742*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2743*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
2744*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2745*a9643ea8Slogwang ); \
2746*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2747*a9643ea8Slogwang } while (0)
2748*a9643ea8Slogwang
2749*a9643ea8Slogwang #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2750*a9643ea8Slogwang do { \
2751*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2752*a9643ea8Slogwang volatile unsigned long _argvec[3+3]; \
2753*a9643ea8Slogwang volatile unsigned long _res; \
2754*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
2755*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
2756*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
2757*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
2758*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
2759*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
2760*a9643ea8Slogwang __asm__ volatile( \
2761*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2762*a9643ea8Slogwang "mr 11,%1\n\t" \
2763*a9643ea8Slogwang "std 2,-16(11)\n\t" /* save tocptr */ \
2764*a9643ea8Slogwang "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2765*a9643ea8Slogwang "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2766*a9643ea8Slogwang "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2767*a9643ea8Slogwang "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2768*a9643ea8Slogwang "ld 11, 0(11)\n\t" /* target->r11 */ \
2769*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2770*a9643ea8Slogwang "mr 11,%1\n\t" \
2771*a9643ea8Slogwang "mr %0,3\n\t" \
2772*a9643ea8Slogwang "ld 2,-16(11)\n\t" /* restore tocptr */ \
2773*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2774*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2775*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
2776*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2777*a9643ea8Slogwang ); \
2778*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2779*a9643ea8Slogwang } while (0)
2780*a9643ea8Slogwang
2781*a9643ea8Slogwang #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2782*a9643ea8Slogwang do { \
2783*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2784*a9643ea8Slogwang volatile unsigned long _argvec[3+4]; \
2785*a9643ea8Slogwang volatile unsigned long _res; \
2786*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
2787*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
2788*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
2789*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
2790*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
2791*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
2792*a9643ea8Slogwang _argvec[2+4] = (unsigned long)arg4; \
2793*a9643ea8Slogwang __asm__ volatile( \
2794*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2795*a9643ea8Slogwang "mr 11,%1\n\t" \
2796*a9643ea8Slogwang "std 2,-16(11)\n\t" /* save tocptr */ \
2797*a9643ea8Slogwang "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2798*a9643ea8Slogwang "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2799*a9643ea8Slogwang "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2800*a9643ea8Slogwang "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2801*a9643ea8Slogwang "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2802*a9643ea8Slogwang "ld 11, 0(11)\n\t" /* target->r11 */ \
2803*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2804*a9643ea8Slogwang "mr 11,%1\n\t" \
2805*a9643ea8Slogwang "mr %0,3\n\t" \
2806*a9643ea8Slogwang "ld 2,-16(11)\n\t" /* restore tocptr */ \
2807*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2808*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2809*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
2810*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2811*a9643ea8Slogwang ); \
2812*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2813*a9643ea8Slogwang } while (0)
2814*a9643ea8Slogwang
2815*a9643ea8Slogwang #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2816*a9643ea8Slogwang do { \
2817*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2818*a9643ea8Slogwang volatile unsigned long _argvec[3+5]; \
2819*a9643ea8Slogwang volatile unsigned long _res; \
2820*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
2821*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
2822*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
2823*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
2824*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
2825*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
2826*a9643ea8Slogwang _argvec[2+4] = (unsigned long)arg4; \
2827*a9643ea8Slogwang _argvec[2+5] = (unsigned long)arg5; \
2828*a9643ea8Slogwang __asm__ volatile( \
2829*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2830*a9643ea8Slogwang "mr 11,%1\n\t" \
2831*a9643ea8Slogwang "std 2,-16(11)\n\t" /* save tocptr */ \
2832*a9643ea8Slogwang "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2833*a9643ea8Slogwang "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2834*a9643ea8Slogwang "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2835*a9643ea8Slogwang "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2836*a9643ea8Slogwang "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2837*a9643ea8Slogwang "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2838*a9643ea8Slogwang "ld 11, 0(11)\n\t" /* target->r11 */ \
2839*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2840*a9643ea8Slogwang "mr 11,%1\n\t" \
2841*a9643ea8Slogwang "mr %0,3\n\t" \
2842*a9643ea8Slogwang "ld 2,-16(11)\n\t" /* restore tocptr */ \
2843*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2844*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2845*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
2846*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2847*a9643ea8Slogwang ); \
2848*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2849*a9643ea8Slogwang } while (0)
2850*a9643ea8Slogwang
2851*a9643ea8Slogwang #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2852*a9643ea8Slogwang do { \
2853*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2854*a9643ea8Slogwang volatile unsigned long _argvec[3+6]; \
2855*a9643ea8Slogwang volatile unsigned long _res; \
2856*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
2857*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
2858*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
2859*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
2860*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
2861*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
2862*a9643ea8Slogwang _argvec[2+4] = (unsigned long)arg4; \
2863*a9643ea8Slogwang _argvec[2+5] = (unsigned long)arg5; \
2864*a9643ea8Slogwang _argvec[2+6] = (unsigned long)arg6; \
2865*a9643ea8Slogwang __asm__ volatile( \
2866*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2867*a9643ea8Slogwang "mr 11,%1\n\t" \
2868*a9643ea8Slogwang "std 2,-16(11)\n\t" /* save tocptr */ \
2869*a9643ea8Slogwang "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2870*a9643ea8Slogwang "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2871*a9643ea8Slogwang "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2872*a9643ea8Slogwang "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2873*a9643ea8Slogwang "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2874*a9643ea8Slogwang "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2875*a9643ea8Slogwang "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2876*a9643ea8Slogwang "ld 11, 0(11)\n\t" /* target->r11 */ \
2877*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2878*a9643ea8Slogwang "mr 11,%1\n\t" \
2879*a9643ea8Slogwang "mr %0,3\n\t" \
2880*a9643ea8Slogwang "ld 2,-16(11)\n\t" /* restore tocptr */ \
2881*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2882*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2883*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
2884*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2885*a9643ea8Slogwang ); \
2886*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2887*a9643ea8Slogwang } while (0)
2888*a9643ea8Slogwang
2889*a9643ea8Slogwang #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2890*a9643ea8Slogwang arg7) \
2891*a9643ea8Slogwang do { \
2892*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2893*a9643ea8Slogwang volatile unsigned long _argvec[3+7]; \
2894*a9643ea8Slogwang volatile unsigned long _res; \
2895*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
2896*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
2897*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
2898*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
2899*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
2900*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
2901*a9643ea8Slogwang _argvec[2+4] = (unsigned long)arg4; \
2902*a9643ea8Slogwang _argvec[2+5] = (unsigned long)arg5; \
2903*a9643ea8Slogwang _argvec[2+6] = (unsigned long)arg6; \
2904*a9643ea8Slogwang _argvec[2+7] = (unsigned long)arg7; \
2905*a9643ea8Slogwang __asm__ volatile( \
2906*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2907*a9643ea8Slogwang "mr 11,%1\n\t" \
2908*a9643ea8Slogwang "std 2,-16(11)\n\t" /* save tocptr */ \
2909*a9643ea8Slogwang "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2910*a9643ea8Slogwang "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2911*a9643ea8Slogwang "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2912*a9643ea8Slogwang "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2913*a9643ea8Slogwang "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2914*a9643ea8Slogwang "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2915*a9643ea8Slogwang "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2916*a9643ea8Slogwang "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2917*a9643ea8Slogwang "ld 11, 0(11)\n\t" /* target->r11 */ \
2918*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2919*a9643ea8Slogwang "mr 11,%1\n\t" \
2920*a9643ea8Slogwang "mr %0,3\n\t" \
2921*a9643ea8Slogwang "ld 2,-16(11)\n\t" /* restore tocptr */ \
2922*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2923*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2924*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
2925*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2926*a9643ea8Slogwang ); \
2927*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2928*a9643ea8Slogwang } while (0)
2929*a9643ea8Slogwang
2930*a9643ea8Slogwang #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2931*a9643ea8Slogwang arg7,arg8) \
2932*a9643ea8Slogwang do { \
2933*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2934*a9643ea8Slogwang volatile unsigned long _argvec[3+8]; \
2935*a9643ea8Slogwang volatile unsigned long _res; \
2936*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
2937*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
2938*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
2939*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
2940*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
2941*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
2942*a9643ea8Slogwang _argvec[2+4] = (unsigned long)arg4; \
2943*a9643ea8Slogwang _argvec[2+5] = (unsigned long)arg5; \
2944*a9643ea8Slogwang _argvec[2+6] = (unsigned long)arg6; \
2945*a9643ea8Slogwang _argvec[2+7] = (unsigned long)arg7; \
2946*a9643ea8Slogwang _argvec[2+8] = (unsigned long)arg8; \
2947*a9643ea8Slogwang __asm__ volatile( \
2948*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2949*a9643ea8Slogwang "mr 11,%1\n\t" \
2950*a9643ea8Slogwang "std 2,-16(11)\n\t" /* save tocptr */ \
2951*a9643ea8Slogwang "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2952*a9643ea8Slogwang "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2953*a9643ea8Slogwang "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2954*a9643ea8Slogwang "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2955*a9643ea8Slogwang "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2956*a9643ea8Slogwang "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2957*a9643ea8Slogwang "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2958*a9643ea8Slogwang "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2959*a9643ea8Slogwang "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2960*a9643ea8Slogwang "ld 11, 0(11)\n\t" /* target->r11 */ \
2961*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2962*a9643ea8Slogwang "mr 11,%1\n\t" \
2963*a9643ea8Slogwang "mr %0,3\n\t" \
2964*a9643ea8Slogwang "ld 2,-16(11)\n\t" /* restore tocptr */ \
2965*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
2966*a9643ea8Slogwang : /*out*/ "=r" (_res) \
2967*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
2968*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2969*a9643ea8Slogwang ); \
2970*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
2971*a9643ea8Slogwang } while (0)
2972*a9643ea8Slogwang
2973*a9643ea8Slogwang #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2974*a9643ea8Slogwang arg7,arg8,arg9) \
2975*a9643ea8Slogwang do { \
2976*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
2977*a9643ea8Slogwang volatile unsigned long _argvec[3+9]; \
2978*a9643ea8Slogwang volatile unsigned long _res; \
2979*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
2980*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
2981*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
2982*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
2983*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
2984*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
2985*a9643ea8Slogwang _argvec[2+4] = (unsigned long)arg4; \
2986*a9643ea8Slogwang _argvec[2+5] = (unsigned long)arg5; \
2987*a9643ea8Slogwang _argvec[2+6] = (unsigned long)arg6; \
2988*a9643ea8Slogwang _argvec[2+7] = (unsigned long)arg7; \
2989*a9643ea8Slogwang _argvec[2+8] = (unsigned long)arg8; \
2990*a9643ea8Slogwang _argvec[2+9] = (unsigned long)arg9; \
2991*a9643ea8Slogwang __asm__ volatile( \
2992*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
2993*a9643ea8Slogwang "mr 11,%1\n\t" \
2994*a9643ea8Slogwang "std 2,-16(11)\n\t" /* save tocptr */ \
2995*a9643ea8Slogwang "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2996*a9643ea8Slogwang "addi 1,1,-128\n\t" /* expand stack frame */ \
2997*a9643ea8Slogwang /* arg9 */ \
2998*a9643ea8Slogwang "ld 3,72(11)\n\t" \
2999*a9643ea8Slogwang "std 3,112(1)\n\t" \
3000*a9643ea8Slogwang /* args1-8 */ \
3001*a9643ea8Slogwang "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3002*a9643ea8Slogwang "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3003*a9643ea8Slogwang "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3004*a9643ea8Slogwang "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3005*a9643ea8Slogwang "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3006*a9643ea8Slogwang "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3007*a9643ea8Slogwang "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3008*a9643ea8Slogwang "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3009*a9643ea8Slogwang "ld 11, 0(11)\n\t" /* target->r11 */ \
3010*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3011*a9643ea8Slogwang "mr 11,%1\n\t" \
3012*a9643ea8Slogwang "mr %0,3\n\t" \
3013*a9643ea8Slogwang "ld 2,-16(11)\n\t" /* restore tocptr */ \
3014*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3015*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3016*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
3017*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3018*a9643ea8Slogwang ); \
3019*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3020*a9643ea8Slogwang } while (0)
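
/* For the ninth and later arguments these ppc64be macros use the
   parameter save area, which in the ELFv1 ABI begins 48 bytes
   above the stack pointer, with the first eight doubleword slots
   shadowing r3..r10.  So after "addi 1,1,-128" (or -144 for the
   11- and 12-argument cases) arg9 is stored at 48 + 8*8 = 112(1),
   arg10 at 120(1), and so on. */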
3021*a9643ea8Slogwang
3022*a9643ea8Slogwang #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3023*a9643ea8Slogwang arg7,arg8,arg9,arg10) \
3024*a9643ea8Slogwang do { \
3025*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3026*a9643ea8Slogwang volatile unsigned long _argvec[3+10]; \
3027*a9643ea8Slogwang volatile unsigned long _res; \
3028*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
3029*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
3030*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
3031*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
3032*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
3033*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
3034*a9643ea8Slogwang _argvec[2+4] = (unsigned long)arg4; \
3035*a9643ea8Slogwang _argvec[2+5] = (unsigned long)arg5; \
3036*a9643ea8Slogwang _argvec[2+6] = (unsigned long)arg6; \
3037*a9643ea8Slogwang _argvec[2+7] = (unsigned long)arg7; \
3038*a9643ea8Slogwang _argvec[2+8] = (unsigned long)arg8; \
3039*a9643ea8Slogwang _argvec[2+9] = (unsigned long)arg9; \
3040*a9643ea8Slogwang _argvec[2+10] = (unsigned long)arg10; \
3041*a9643ea8Slogwang __asm__ volatile( \
3042*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3043*a9643ea8Slogwang "mr 11,%1\n\t" \
3044*a9643ea8Slogwang "std 2,-16(11)\n\t" /* save tocptr */ \
3045*a9643ea8Slogwang "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3046*a9643ea8Slogwang "addi 1,1,-128\n\t" /* expand stack frame */ \
3047*a9643ea8Slogwang /* arg10 */ \
3048*a9643ea8Slogwang "ld 3,80(11)\n\t" \
3049*a9643ea8Slogwang "std 3,120(1)\n\t" \
3050*a9643ea8Slogwang /* arg9 */ \
3051*a9643ea8Slogwang "ld 3,72(11)\n\t" \
3052*a9643ea8Slogwang "std 3,112(1)\n\t" \
3053*a9643ea8Slogwang /* args1-8 */ \
3054*a9643ea8Slogwang "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3055*a9643ea8Slogwang "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3056*a9643ea8Slogwang "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3057*a9643ea8Slogwang "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3058*a9643ea8Slogwang "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3059*a9643ea8Slogwang "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3060*a9643ea8Slogwang "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3061*a9643ea8Slogwang "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3062*a9643ea8Slogwang "ld 11, 0(11)\n\t" /* target->r11 */ \
3063*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3064*a9643ea8Slogwang "mr 11,%1\n\t" \
3065*a9643ea8Slogwang "mr %0,3\n\t" \
3066*a9643ea8Slogwang "ld 2,-16(11)\n\t" /* restore tocptr */ \
3067*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3068*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3069*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
3070*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3071*a9643ea8Slogwang ); \
3072*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3073*a9643ea8Slogwang } while (0)
3074*a9643ea8Slogwang
3075*a9643ea8Slogwang #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3076*a9643ea8Slogwang arg7,arg8,arg9,arg10,arg11) \
3077*a9643ea8Slogwang do { \
3078*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3079*a9643ea8Slogwang volatile unsigned long _argvec[3+11]; \
3080*a9643ea8Slogwang volatile unsigned long _res; \
3081*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
3082*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
3083*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
3084*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
3085*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
3086*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
3087*a9643ea8Slogwang _argvec[2+4] = (unsigned long)arg4; \
3088*a9643ea8Slogwang _argvec[2+5] = (unsigned long)arg5; \
3089*a9643ea8Slogwang _argvec[2+6] = (unsigned long)arg6; \
3090*a9643ea8Slogwang _argvec[2+7] = (unsigned long)arg7; \
3091*a9643ea8Slogwang _argvec[2+8] = (unsigned long)arg8; \
3092*a9643ea8Slogwang _argvec[2+9] = (unsigned long)arg9; \
3093*a9643ea8Slogwang _argvec[2+10] = (unsigned long)arg10; \
3094*a9643ea8Slogwang _argvec[2+11] = (unsigned long)arg11; \
3095*a9643ea8Slogwang __asm__ volatile( \
3096*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3097*a9643ea8Slogwang "mr 11,%1\n\t" \
3098*a9643ea8Slogwang "std 2,-16(11)\n\t" /* save tocptr */ \
3099*a9643ea8Slogwang "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3100*a9643ea8Slogwang "addi 1,1,-144\n\t" /* expand stack frame */ \
3101*a9643ea8Slogwang /* arg11 */ \
3102*a9643ea8Slogwang "ld 3,88(11)\n\t" \
3103*a9643ea8Slogwang "std 3,128(1)\n\t" \
3104*a9643ea8Slogwang /* arg10 */ \
3105*a9643ea8Slogwang "ld 3,80(11)\n\t" \
3106*a9643ea8Slogwang "std 3,120(1)\n\t" \
3107*a9643ea8Slogwang /* arg9 */ \
3108*a9643ea8Slogwang "ld 3,72(11)\n\t" \
3109*a9643ea8Slogwang "std 3,112(1)\n\t" \
3110*a9643ea8Slogwang /* args1-8 */ \
3111*a9643ea8Slogwang "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3112*a9643ea8Slogwang "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3113*a9643ea8Slogwang "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3114*a9643ea8Slogwang "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3115*a9643ea8Slogwang "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3116*a9643ea8Slogwang "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3117*a9643ea8Slogwang "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3118*a9643ea8Slogwang "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3119*a9643ea8Slogwang "ld 11, 0(11)\n\t" /* target->r11 */ \
3120*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3121*a9643ea8Slogwang "mr 11,%1\n\t" \
3122*a9643ea8Slogwang "mr %0,3\n\t" \
3123*a9643ea8Slogwang "ld 2,-16(11)\n\t" /* restore tocptr */ \
3124*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3125*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3126*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
3127*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3128*a9643ea8Slogwang ); \
3129*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3130*a9643ea8Slogwang } while (0)
3131*a9643ea8Slogwang
3132*a9643ea8Slogwang #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3133*a9643ea8Slogwang arg7,arg8,arg9,arg10,arg11,arg12) \
3134*a9643ea8Slogwang do { \
3135*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3136*a9643ea8Slogwang volatile unsigned long _argvec[3+12]; \
3137*a9643ea8Slogwang volatile unsigned long _res; \
3138*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
3139*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
3140*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
3141*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
3142*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
3143*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
3144*a9643ea8Slogwang _argvec[2+4] = (unsigned long)arg4; \
3145*a9643ea8Slogwang _argvec[2+5] = (unsigned long)arg5; \
3146*a9643ea8Slogwang _argvec[2+6] = (unsigned long)arg6; \
3147*a9643ea8Slogwang _argvec[2+7] = (unsigned long)arg7; \
3148*a9643ea8Slogwang _argvec[2+8] = (unsigned long)arg8; \
3149*a9643ea8Slogwang _argvec[2+9] = (unsigned long)arg9; \
3150*a9643ea8Slogwang _argvec[2+10] = (unsigned long)arg10; \
3151*a9643ea8Slogwang _argvec[2+11] = (unsigned long)arg11; \
3152*a9643ea8Slogwang _argvec[2+12] = (unsigned long)arg12; \
3153*a9643ea8Slogwang __asm__ volatile( \
3154*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3155*a9643ea8Slogwang "mr 11,%1\n\t" \
3156*a9643ea8Slogwang "std 2,-16(11)\n\t" /* save tocptr */ \
3157*a9643ea8Slogwang "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3158*a9643ea8Slogwang "addi 1,1,-144\n\t" /* expand stack frame */ \
3159*a9643ea8Slogwang /* arg12 */ \
3160*a9643ea8Slogwang "ld 3,96(11)\n\t" \
3161*a9643ea8Slogwang "std 3,136(1)\n\t" \
3162*a9643ea8Slogwang /* arg11 */ \
3163*a9643ea8Slogwang "ld 3,88(11)\n\t" \
3164*a9643ea8Slogwang "std 3,128(1)\n\t" \
3165*a9643ea8Slogwang /* arg10 */ \
3166*a9643ea8Slogwang "ld 3,80(11)\n\t" \
3167*a9643ea8Slogwang "std 3,120(1)\n\t" \
3168*a9643ea8Slogwang /* arg9 */ \
3169*a9643ea8Slogwang "ld 3,72(11)\n\t" \
3170*a9643ea8Slogwang "std 3,112(1)\n\t" \
3171*a9643ea8Slogwang /* args1-8 */ \
3172*a9643ea8Slogwang "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3173*a9643ea8Slogwang "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3174*a9643ea8Slogwang "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3175*a9643ea8Slogwang "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3176*a9643ea8Slogwang "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3177*a9643ea8Slogwang "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3178*a9643ea8Slogwang "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3179*a9643ea8Slogwang "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3180*a9643ea8Slogwang "ld 11, 0(11)\n\t" /* target->r11 */ \
3181*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3182*a9643ea8Slogwang "mr 11,%1\n\t" \
3183*a9643ea8Slogwang "mr %0,3\n\t" \
3184*a9643ea8Slogwang "ld 2,-16(11)\n\t" /* restore tocptr */ \
3185*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3186*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3187*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
3188*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3189*a9643ea8Slogwang ); \
3190*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3191*a9643ea8Slogwang } while (0)
3192*a9643ea8Slogwang
3193*a9643ea8Slogwang #endif /* PLAT_ppc64be_linux */
3194*a9643ea8Slogwang
3195*a9643ea8Slogwang /* ------------------------- ppc64le-linux ----------------------- */
3196*a9643ea8Slogwang #if defined(PLAT_ppc64le_linux)
3197*a9643ea8Slogwang
3198*a9643ea8Slogwang /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
3199*a9643ea8Slogwang
3200*a9643ea8Slogwang /* These regs are trashed by the hidden call. */
3201*a9643ea8Slogwang #define __CALLER_SAVED_REGS \
3202*a9643ea8Slogwang "lr", "ctr", "xer", \
3203*a9643ea8Slogwang "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
3204*a9643ea8Slogwang "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
3205*a9643ea8Slogwang "r11", "r12", "r13"
3206*a9643ea8Slogwang
3207*a9643ea8Slogwang /* Macros to save and align the stack before making a function
3208*a9643ea8Slogwang call and restore it afterwards as gcc may not keep the stack
3209*a9643ea8Slogwang pointer aligned if it doesn't realise calls are being made
3210*a9643ea8Slogwang to other functions. */
3211*a9643ea8Slogwang
3212*a9643ea8Slogwang #define VALGRIND_ALIGN_STACK \
3213*a9643ea8Slogwang "mr 28,1\n\t" \
3214*a9643ea8Slogwang "rldicr 1,1,0,59\n\t"
3215*a9643ea8Slogwang #define VALGRIND_RESTORE_STACK \
3216*a9643ea8Slogwang "mr 1,28\n\t"
3217*a9643ea8Slogwang
3218*a9643ea8Slogwang /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
3219*a9643ea8Slogwang long) == 8. */
3220*a9643ea8Slogwang
3221*a9643ea8Slogwang #define CALL_FN_W_v(lval, orig) \
3222*a9643ea8Slogwang do { \
3223*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3224*a9643ea8Slogwang volatile unsigned long _argvec[3+0]; \
3225*a9643ea8Slogwang volatile unsigned long _res; \
3226*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
3227*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
3228*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
3229*a9643ea8Slogwang __asm__ volatile( \
3230*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3231*a9643ea8Slogwang "mr 12,%1\n\t" \
3232*a9643ea8Slogwang "std 2,-16(12)\n\t" /* save tocptr */ \
3233*a9643ea8Slogwang "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3234*a9643ea8Slogwang "ld 12, 0(12)\n\t" /* target->r12 */ \
3235*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3236*a9643ea8Slogwang "mr 12,%1\n\t" \
3237*a9643ea8Slogwang "mr %0,3\n\t" \
3238*a9643ea8Slogwang "ld 2,-16(12)\n\t" /* restore tocptr */ \
3239*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3240*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3241*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
3242*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3243*a9643ea8Slogwang ); \
3244*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3245*a9643ea8Slogwang } while (0)
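
/* The visible difference from the ppc64be code above: these
   little-endian (ELFv2) macros move the target address into r12
   and branch via VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12, since
   the ELFv2 ABI expects r12 to contain the address of the
   callee's global entry point (from which the callee rederives
   its TOC pointer).  The argument and TOC bookkeeping otherwise
   mirrors the big-endian variants. */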
3246*a9643ea8Slogwang
3247*a9643ea8Slogwang #define CALL_FN_W_W(lval, orig, arg1) \
3248*a9643ea8Slogwang do { \
3249*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3250*a9643ea8Slogwang volatile unsigned long _argvec[3+1]; \
3251*a9643ea8Slogwang volatile unsigned long _res; \
3252*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
3253*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
3254*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
3255*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
3256*a9643ea8Slogwang __asm__ volatile( \
3257*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3258*a9643ea8Slogwang "mr 12,%1\n\t" \
3259*a9643ea8Slogwang "std 2,-16(12)\n\t" /* save tocptr */ \
3260*a9643ea8Slogwang "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3261*a9643ea8Slogwang "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3262*a9643ea8Slogwang "ld 12, 0(12)\n\t" /* target->r12 */ \
3263*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3264*a9643ea8Slogwang "mr 12,%1\n\t" \
3265*a9643ea8Slogwang "mr %0,3\n\t" \
3266*a9643ea8Slogwang "ld 2,-16(12)\n\t" /* restore tocptr */ \
3267*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3268*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3269*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
3270*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3271*a9643ea8Slogwang ); \
3272*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3273*a9643ea8Slogwang } while (0)
3274*a9643ea8Slogwang
3275*a9643ea8Slogwang #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3276*a9643ea8Slogwang do { \
3277*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3278*a9643ea8Slogwang volatile unsigned long _argvec[3+2]; \
3279*a9643ea8Slogwang volatile unsigned long _res; \
3280*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
3281*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
3282*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
3283*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
3284*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
3285*a9643ea8Slogwang __asm__ volatile( \
3286*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3287*a9643ea8Slogwang "mr 12,%1\n\t" \
3288*a9643ea8Slogwang "std 2,-16(12)\n\t" /* save tocptr */ \
3289*a9643ea8Slogwang "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3290*a9643ea8Slogwang "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3291*a9643ea8Slogwang "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3292*a9643ea8Slogwang "ld 12, 0(12)\n\t" /* target->r12 */ \
3293*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3294*a9643ea8Slogwang "mr 12,%1\n\t" \
3295*a9643ea8Slogwang "mr %0,3\n\t" \
3296*a9643ea8Slogwang "ld 2,-16(12)\n\t" /* restore tocptr */ \
3297*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3298*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3299*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
3300*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3301*a9643ea8Slogwang ); \
3302*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3303*a9643ea8Slogwang } while (0)
3304*a9643ea8Slogwang
3305*a9643ea8Slogwang #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3306*a9643ea8Slogwang do { \
3307*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3308*a9643ea8Slogwang volatile unsigned long _argvec[3+3]; \
3309*a9643ea8Slogwang volatile unsigned long _res; \
3310*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
3311*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
3312*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
3313*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
3314*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
3315*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
3316*a9643ea8Slogwang __asm__ volatile( \
3317*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3318*a9643ea8Slogwang "mr 12,%1\n\t" \
3319*a9643ea8Slogwang "std 2,-16(12)\n\t" /* save tocptr */ \
3320*a9643ea8Slogwang "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3321*a9643ea8Slogwang "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3322*a9643ea8Slogwang "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3323*a9643ea8Slogwang "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3324*a9643ea8Slogwang "ld 12, 0(12)\n\t" /* target->r12 */ \
3325*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3326*a9643ea8Slogwang "mr 12,%1\n\t" \
3327*a9643ea8Slogwang "mr %0,3\n\t" \
3328*a9643ea8Slogwang "ld 2,-16(12)\n\t" /* restore tocptr */ \
3329*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3330*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3331*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
3332*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3333*a9643ea8Slogwang ); \
3334*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3335*a9643ea8Slogwang } while (0)
3336*a9643ea8Slogwang
3337*a9643ea8Slogwang #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3338*a9643ea8Slogwang do { \
3339*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3340*a9643ea8Slogwang volatile unsigned long _argvec[3+4]; \
3341*a9643ea8Slogwang volatile unsigned long _res; \
3342*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
3343*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
3344*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
3345*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
3346*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
3347*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
3348*a9643ea8Slogwang _argvec[2+4] = (unsigned long)arg4; \
3349*a9643ea8Slogwang __asm__ volatile( \
3350*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3351*a9643ea8Slogwang "mr 12,%1\n\t" \
3352*a9643ea8Slogwang "std 2,-16(12)\n\t" /* save tocptr */ \
3353*a9643ea8Slogwang "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3354*a9643ea8Slogwang "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3355*a9643ea8Slogwang "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3356*a9643ea8Slogwang "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3357*a9643ea8Slogwang "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3358*a9643ea8Slogwang "ld 12, 0(12)\n\t" /* target->r12 */ \
3359*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3360*a9643ea8Slogwang "mr 12,%1\n\t" \
3361*a9643ea8Slogwang "mr %0,3\n\t" \
3362*a9643ea8Slogwang "ld 2,-16(12)\n\t" /* restore tocptr */ \
3363*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3364*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3365*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
3366*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3367*a9643ea8Slogwang ); \
3368*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3369*a9643ea8Slogwang } while (0)
3370*a9643ea8Slogwang
3371*a9643ea8Slogwang #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3372*a9643ea8Slogwang do { \
3373*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3374*a9643ea8Slogwang volatile unsigned long _argvec[3+5]; \
3375*a9643ea8Slogwang volatile unsigned long _res; \
3376*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
3377*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
3378*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
3379*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
3380*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
3381*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
3382*a9643ea8Slogwang _argvec[2+4] = (unsigned long)arg4; \
3383*a9643ea8Slogwang _argvec[2+5] = (unsigned long)arg5; \
3384*a9643ea8Slogwang __asm__ volatile( \
3385*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3386*a9643ea8Slogwang "mr 12,%1\n\t" \
3387*a9643ea8Slogwang "std 2,-16(12)\n\t" /* save tocptr */ \
3388*a9643ea8Slogwang "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3389*a9643ea8Slogwang "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3390*a9643ea8Slogwang "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3391*a9643ea8Slogwang "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3392*a9643ea8Slogwang "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3393*a9643ea8Slogwang "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3394*a9643ea8Slogwang "ld 12, 0(12)\n\t" /* target->r12 */ \
3395*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3396*a9643ea8Slogwang "mr 12,%1\n\t" \
3397*a9643ea8Slogwang "mr %0,3\n\t" \
3398*a9643ea8Slogwang "ld 2,-16(12)\n\t" /* restore tocptr */ \
3399*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3400*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3401*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
3402*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3403*a9643ea8Slogwang ); \
3404*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3405*a9643ea8Slogwang } while (0)
3406*a9643ea8Slogwang
3407*a9643ea8Slogwang #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3408*a9643ea8Slogwang do { \
3409*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3410*a9643ea8Slogwang volatile unsigned long _argvec[3+6]; \
3411*a9643ea8Slogwang volatile unsigned long _res; \
3412*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
3413*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
3414*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
3415*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
3416*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
3417*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
3418*a9643ea8Slogwang _argvec[2+4] = (unsigned long)arg4; \
3419*a9643ea8Slogwang _argvec[2+5] = (unsigned long)arg5; \
3420*a9643ea8Slogwang _argvec[2+6] = (unsigned long)arg6; \
3421*a9643ea8Slogwang __asm__ volatile( \
3422*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3423*a9643ea8Slogwang "mr 12,%1\n\t" \
3424*a9643ea8Slogwang "std 2,-16(12)\n\t" /* save tocptr */ \
3425*a9643ea8Slogwang "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3426*a9643ea8Slogwang "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3427*a9643ea8Slogwang "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3428*a9643ea8Slogwang "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3429*a9643ea8Slogwang "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3430*a9643ea8Slogwang "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3431*a9643ea8Slogwang "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3432*a9643ea8Slogwang "ld 12, 0(12)\n\t" /* target->r12 */ \
3433*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3434*a9643ea8Slogwang "mr 12,%1\n\t" \
3435*a9643ea8Slogwang "mr %0,3\n\t" \
3436*a9643ea8Slogwang "ld 2,-16(12)\n\t" /* restore tocptr */ \
3437*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3438*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3439*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
3440*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3441*a9643ea8Slogwang ); \
3442*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3443*a9643ea8Slogwang } while (0)
3444*a9643ea8Slogwang
3445*a9643ea8Slogwang #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3446*a9643ea8Slogwang arg7) \
3447*a9643ea8Slogwang do { \
3448*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3449*a9643ea8Slogwang volatile unsigned long _argvec[3+7]; \
3450*a9643ea8Slogwang volatile unsigned long _res; \
3451*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
3452*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
3453*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
3454*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
3455*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
3456*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
3457*a9643ea8Slogwang _argvec[2+4] = (unsigned long)arg4; \
3458*a9643ea8Slogwang _argvec[2+5] = (unsigned long)arg5; \
3459*a9643ea8Slogwang _argvec[2+6] = (unsigned long)arg6; \
3460*a9643ea8Slogwang _argvec[2+7] = (unsigned long)arg7; \
3461*a9643ea8Slogwang __asm__ volatile( \
3462*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3463*a9643ea8Slogwang "mr 12,%1\n\t" \
3464*a9643ea8Slogwang "std 2,-16(12)\n\t" /* save tocptr */ \
3465*a9643ea8Slogwang "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3466*a9643ea8Slogwang "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3467*a9643ea8Slogwang "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3468*a9643ea8Slogwang "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3469*a9643ea8Slogwang "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3470*a9643ea8Slogwang "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3471*a9643ea8Slogwang "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3472*a9643ea8Slogwang "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3473*a9643ea8Slogwang "ld 12, 0(12)\n\t" /* target->r12 */ \
3474*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3475*a9643ea8Slogwang "mr 12,%1\n\t" \
3476*a9643ea8Slogwang "mr %0,3\n\t" \
3477*a9643ea8Slogwang "ld 2,-16(12)\n\t" /* restore tocptr */ \
3478*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3479*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3480*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
3481*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3482*a9643ea8Slogwang ); \
3483*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3484*a9643ea8Slogwang } while (0)
3485*a9643ea8Slogwang
3486*a9643ea8Slogwang #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3487*a9643ea8Slogwang arg7,arg8) \
3488*a9643ea8Slogwang do { \
3489*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3490*a9643ea8Slogwang volatile unsigned long _argvec[3+8]; \
3491*a9643ea8Slogwang volatile unsigned long _res; \
3492*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
3493*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
3494*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
3495*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
3496*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
3497*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
3498*a9643ea8Slogwang _argvec[2+4] = (unsigned long)arg4; \
3499*a9643ea8Slogwang _argvec[2+5] = (unsigned long)arg5; \
3500*a9643ea8Slogwang _argvec[2+6] = (unsigned long)arg6; \
3501*a9643ea8Slogwang _argvec[2+7] = (unsigned long)arg7; \
3502*a9643ea8Slogwang _argvec[2+8] = (unsigned long)arg8; \
3503*a9643ea8Slogwang __asm__ volatile( \
3504*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3505*a9643ea8Slogwang "mr 12,%1\n\t" \
3506*a9643ea8Slogwang "std 2,-16(12)\n\t" /* save tocptr */ \
3507*a9643ea8Slogwang "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3508*a9643ea8Slogwang "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3509*a9643ea8Slogwang "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3510*a9643ea8Slogwang "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3511*a9643ea8Slogwang "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3512*a9643ea8Slogwang "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3513*a9643ea8Slogwang "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3514*a9643ea8Slogwang "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3515*a9643ea8Slogwang "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3516*a9643ea8Slogwang "ld 12, 0(12)\n\t" /* target->r12 */ \
3517*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3518*a9643ea8Slogwang "mr 12,%1\n\t" \
3519*a9643ea8Slogwang "mr %0,3\n\t" \
3520*a9643ea8Slogwang "ld 2,-16(12)\n\t" /* restore tocptr */ \
3521*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3522*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3523*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
3524*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3525*a9643ea8Slogwang ); \
3526*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3527*a9643ea8Slogwang } while (0)
3528*a9643ea8Slogwang
3529*a9643ea8Slogwang #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3530*a9643ea8Slogwang arg7,arg8,arg9) \
3531*a9643ea8Slogwang do { \
3532*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3533*a9643ea8Slogwang volatile unsigned long _argvec[3+9]; \
3534*a9643ea8Slogwang volatile unsigned long _res; \
3535*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
3536*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
3537*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
3538*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
3539*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
3540*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
3541*a9643ea8Slogwang _argvec[2+4] = (unsigned long)arg4; \
3542*a9643ea8Slogwang _argvec[2+5] = (unsigned long)arg5; \
3543*a9643ea8Slogwang _argvec[2+6] = (unsigned long)arg6; \
3544*a9643ea8Slogwang _argvec[2+7] = (unsigned long)arg7; \
3545*a9643ea8Slogwang _argvec[2+8] = (unsigned long)arg8; \
3546*a9643ea8Slogwang _argvec[2+9] = (unsigned long)arg9; \
3547*a9643ea8Slogwang __asm__ volatile( \
3548*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3549*a9643ea8Slogwang "mr 12,%1\n\t" \
3550*a9643ea8Slogwang "std 2,-16(12)\n\t" /* save tocptr */ \
3551*a9643ea8Slogwang "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3552*a9643ea8Slogwang "addi 1,1,-128\n\t" /* expand stack frame */ \
3553*a9643ea8Slogwang /* arg9 */ \
3554*a9643ea8Slogwang "ld 3,72(12)\n\t" \
3555*a9643ea8Slogwang "std 3,96(1)\n\t" \
3556*a9643ea8Slogwang /* args1-8 */ \
3557*a9643ea8Slogwang "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3558*a9643ea8Slogwang "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3559*a9643ea8Slogwang "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3560*a9643ea8Slogwang "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3561*a9643ea8Slogwang "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3562*a9643ea8Slogwang "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3563*a9643ea8Slogwang "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3564*a9643ea8Slogwang "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3565*a9643ea8Slogwang "ld 12, 0(12)\n\t" /* target->r12 */ \
3566*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3567*a9643ea8Slogwang "mr 12,%1\n\t" \
3568*a9643ea8Slogwang "mr %0,3\n\t" \
3569*a9643ea8Slogwang "ld 2,-16(12)\n\t" /* restore tocptr */ \
3570*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3571*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3572*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
3573*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3574*a9643ea8Slogwang ); \
3575*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3576*a9643ea8Slogwang } while (0)
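/* Note on the stack stores above: the ELFv2 ABI used on ppc64le passes
   the first eight integer arguments in r3-r10 and any further ones in
   the caller's parameter save area, which begins 32 bytes above the
   stack pointer at the point of the call.  After "addi 1,1,-128" the
   ninth doubleword slot is therefore at 32 + 8*8 = 96(r1), which is
   where arg9 is stored; the 10-, 11- and 12-argument variants below
   place arg10..arg12 at 104, 112 and 120 in the same way. */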
3577*a9643ea8Slogwang
3578*a9643ea8Slogwang #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3579*a9643ea8Slogwang arg7,arg8,arg9,arg10) \
3580*a9643ea8Slogwang do { \
3581*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3582*a9643ea8Slogwang volatile unsigned long _argvec[3+10]; \
3583*a9643ea8Slogwang volatile unsigned long _res; \
3584*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
3585*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
3586*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
3587*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
3588*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
3589*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
3590*a9643ea8Slogwang _argvec[2+4] = (unsigned long)arg4; \
3591*a9643ea8Slogwang _argvec[2+5] = (unsigned long)arg5; \
3592*a9643ea8Slogwang _argvec[2+6] = (unsigned long)arg6; \
3593*a9643ea8Slogwang _argvec[2+7] = (unsigned long)arg7; \
3594*a9643ea8Slogwang _argvec[2+8] = (unsigned long)arg8; \
3595*a9643ea8Slogwang _argvec[2+9] = (unsigned long)arg9; \
3596*a9643ea8Slogwang _argvec[2+10] = (unsigned long)arg10; \
3597*a9643ea8Slogwang __asm__ volatile( \
3598*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3599*a9643ea8Slogwang "mr 12,%1\n\t" \
3600*a9643ea8Slogwang "std 2,-16(12)\n\t" /* save tocptr */ \
3601*a9643ea8Slogwang "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3602*a9643ea8Slogwang "addi 1,1,-128\n\t" /* expand stack frame */ \
3603*a9643ea8Slogwang /* arg10 */ \
3604*a9643ea8Slogwang "ld 3,80(12)\n\t" \
3605*a9643ea8Slogwang "std 3,104(1)\n\t" \
3606*a9643ea8Slogwang /* arg9 */ \
3607*a9643ea8Slogwang "ld 3,72(12)\n\t" \
3608*a9643ea8Slogwang "std 3,96(1)\n\t" \
3609*a9643ea8Slogwang /* args1-8 */ \
3610*a9643ea8Slogwang "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3611*a9643ea8Slogwang "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3612*a9643ea8Slogwang "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3613*a9643ea8Slogwang "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3614*a9643ea8Slogwang "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3615*a9643ea8Slogwang "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3616*a9643ea8Slogwang "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3617*a9643ea8Slogwang "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3618*a9643ea8Slogwang "ld 12, 0(12)\n\t" /* target->r12 */ \
3619*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3620*a9643ea8Slogwang "mr 12,%1\n\t" \
3621*a9643ea8Slogwang "mr %0,3\n\t" \
3622*a9643ea8Slogwang "ld 2,-16(12)\n\t" /* restore tocptr */ \
3623*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3624*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3625*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
3626*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3627*a9643ea8Slogwang ); \
3628*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3629*a9643ea8Slogwang } while (0)
3630*a9643ea8Slogwang
3631*a9643ea8Slogwang #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3632*a9643ea8Slogwang arg7,arg8,arg9,arg10,arg11) \
3633*a9643ea8Slogwang do { \
3634*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3635*a9643ea8Slogwang volatile unsigned long _argvec[3+11]; \
3636*a9643ea8Slogwang volatile unsigned long _res; \
3637*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
3638*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
3639*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
3640*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
3641*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
3642*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
3643*a9643ea8Slogwang _argvec[2+4] = (unsigned long)arg4; \
3644*a9643ea8Slogwang _argvec[2+5] = (unsigned long)arg5; \
3645*a9643ea8Slogwang _argvec[2+6] = (unsigned long)arg6; \
3646*a9643ea8Slogwang _argvec[2+7] = (unsigned long)arg7; \
3647*a9643ea8Slogwang _argvec[2+8] = (unsigned long)arg8; \
3648*a9643ea8Slogwang _argvec[2+9] = (unsigned long)arg9; \
3649*a9643ea8Slogwang _argvec[2+10] = (unsigned long)arg10; \
3650*a9643ea8Slogwang _argvec[2+11] = (unsigned long)arg11; \
3651*a9643ea8Slogwang __asm__ volatile( \
3652*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3653*a9643ea8Slogwang "mr 12,%1\n\t" \
3654*a9643ea8Slogwang "std 2,-16(12)\n\t" /* save tocptr */ \
3655*a9643ea8Slogwang "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3656*a9643ea8Slogwang "addi 1,1,-144\n\t" /* expand stack frame */ \
3657*a9643ea8Slogwang /* arg11 */ \
3658*a9643ea8Slogwang "ld 3,88(12)\n\t" \
3659*a9643ea8Slogwang "std 3,112(1)\n\t" \
3660*a9643ea8Slogwang /* arg10 */ \
3661*a9643ea8Slogwang "ld 3,80(12)\n\t" \
3662*a9643ea8Slogwang "std 3,104(1)\n\t" \
3663*a9643ea8Slogwang /* arg9 */ \
3664*a9643ea8Slogwang "ld 3,72(12)\n\t" \
3665*a9643ea8Slogwang "std 3,96(1)\n\t" \
3666*a9643ea8Slogwang /* args1-8 */ \
3667*a9643ea8Slogwang "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3668*a9643ea8Slogwang "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3669*a9643ea8Slogwang "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3670*a9643ea8Slogwang "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3671*a9643ea8Slogwang "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3672*a9643ea8Slogwang "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3673*a9643ea8Slogwang "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3674*a9643ea8Slogwang "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3675*a9643ea8Slogwang "ld 12, 0(12)\n\t" /* target->r12 */ \
3676*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3677*a9643ea8Slogwang "mr 12,%1\n\t" \
3678*a9643ea8Slogwang "mr %0,3\n\t" \
3679*a9643ea8Slogwang "ld 2,-16(12)\n\t" /* restore tocptr */ \
3680*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3681*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3682*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
3683*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3684*a9643ea8Slogwang ); \
3685*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3686*a9643ea8Slogwang } while (0)
3687*a9643ea8Slogwang
3688*a9643ea8Slogwang #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3689*a9643ea8Slogwang arg7,arg8,arg9,arg10,arg11,arg12) \
3690*a9643ea8Slogwang do { \
3691*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3692*a9643ea8Slogwang volatile unsigned long _argvec[3+12]; \
3693*a9643ea8Slogwang volatile unsigned long _res; \
3694*a9643ea8Slogwang /* _argvec[0] holds current r2 across the call */ \
3695*a9643ea8Slogwang _argvec[1] = (unsigned long)_orig.r2; \
3696*a9643ea8Slogwang _argvec[2] = (unsigned long)_orig.nraddr; \
3697*a9643ea8Slogwang _argvec[2+1] = (unsigned long)arg1; \
3698*a9643ea8Slogwang _argvec[2+2] = (unsigned long)arg2; \
3699*a9643ea8Slogwang _argvec[2+3] = (unsigned long)arg3; \
3700*a9643ea8Slogwang _argvec[2+4] = (unsigned long)arg4; \
3701*a9643ea8Slogwang _argvec[2+5] = (unsigned long)arg5; \
3702*a9643ea8Slogwang _argvec[2+6] = (unsigned long)arg6; \
3703*a9643ea8Slogwang _argvec[2+7] = (unsigned long)arg7; \
3704*a9643ea8Slogwang _argvec[2+8] = (unsigned long)arg8; \
3705*a9643ea8Slogwang _argvec[2+9] = (unsigned long)arg9; \
3706*a9643ea8Slogwang _argvec[2+10] = (unsigned long)arg10; \
3707*a9643ea8Slogwang _argvec[2+11] = (unsigned long)arg11; \
3708*a9643ea8Slogwang _argvec[2+12] = (unsigned long)arg12; \
3709*a9643ea8Slogwang __asm__ volatile( \
3710*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3711*a9643ea8Slogwang "mr 12,%1\n\t" \
3712*a9643ea8Slogwang "std 2,-16(12)\n\t" /* save tocptr */ \
3713*a9643ea8Slogwang "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3714*a9643ea8Slogwang "addi 1,1,-144\n\t" /* expand stack frame */ \
3715*a9643ea8Slogwang /* arg12 */ \
3716*a9643ea8Slogwang "ld 3,96(12)\n\t" \
3717*a9643ea8Slogwang "std 3,120(1)\n\t" \
3718*a9643ea8Slogwang /* arg11 */ \
3719*a9643ea8Slogwang "ld 3,88(12)\n\t" \
3720*a9643ea8Slogwang "std 3,112(1)\n\t" \
3721*a9643ea8Slogwang /* arg10 */ \
3722*a9643ea8Slogwang "ld 3,80(12)\n\t" \
3723*a9643ea8Slogwang "std 3,104(1)\n\t" \
3724*a9643ea8Slogwang /* arg9 */ \
3725*a9643ea8Slogwang "ld 3,72(12)\n\t" \
3726*a9643ea8Slogwang "std 3,96(1)\n\t" \
3727*a9643ea8Slogwang /* args1-8 */ \
3728*a9643ea8Slogwang "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3729*a9643ea8Slogwang "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3730*a9643ea8Slogwang "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3731*a9643ea8Slogwang "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3732*a9643ea8Slogwang "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3733*a9643ea8Slogwang "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3734*a9643ea8Slogwang "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3735*a9643ea8Slogwang "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3736*a9643ea8Slogwang "ld 12, 0(12)\n\t" /* target->r12 */ \
3737*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3738*a9643ea8Slogwang "mr 12,%1\n\t" \
3739*a9643ea8Slogwang "mr %0,3\n\t" \
3740*a9643ea8Slogwang "ld 2,-16(12)\n\t" /* restore tocptr */ \
3741*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3742*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3743*a9643ea8Slogwang : /*in*/ "r" (&_argvec[2]) \
3744*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3745*a9643ea8Slogwang ); \
3746*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3747*a9643ea8Slogwang } while (0)
3748*a9643ea8Slogwang
3749*a9643ea8Slogwang #endif /* PLAT_ppc64le_linux */
3750*a9643ea8Slogwang
3751*a9643ea8Slogwang /* ------------------------- arm-linux ------------------------- */
3752*a9643ea8Slogwang
3753*a9643ea8Slogwang #if defined(PLAT_arm_linux)
3754*a9643ea8Slogwang
3755*a9643ea8Slogwang /* These regs are trashed by the hidden call. */
3756*a9643ea8Slogwang #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3", "r4", "r14"
3757*a9643ea8Slogwang
3758*a9643ea8Slogwang /* Macros to save and align the stack before making a function
3759*a9643ea8Slogwang call and restore it afterwards as gcc may not keep the stack
3760*a9643ea8Slogwang pointer aligned if it doesn't realise calls are being made
3761*a9643ea8Slogwang to other functions. */
3762*a9643ea8Slogwang
3763*a9643ea8Slogwang /* This is a bit tricky.  We store the original stack pointer in r10
3764*a9643ea8Slogwang    because it is callee-saved; gcc does not allow r11 to be used here,
3765*a9643ea8Slogwang    presumably because it may be reserved as the frame pointer.  Also,
3766*a9643ea8Slogwang    we can't directly "bic" the stack pointer in thumb mode since r13
3767*a9643ea8Slogwang    isn't an allowed register number in that context.  So use r4 as a
3768*a9643ea8Slogwang    temporary, since it is about to get trashed anyway, just after each
3769*a9643ea8Slogwang    use of this macro.  A side effect is that we must be very careful
3770*a9643ea8Slogwang    about any future changes, since VALGRIND_ALIGN_STACK assumes r4 is usable. */
3771*a9643ea8Slogwang #define VALGRIND_ALIGN_STACK \
3772*a9643ea8Slogwang "mov r10, sp\n\t" \
3773*a9643ea8Slogwang "mov r4, sp\n\t" \
3774*a9643ea8Slogwang "bic r4, r4, #7\n\t" \
3775*a9643ea8Slogwang "mov sp, r4\n\t"
3776*a9643ea8Slogwang #define VALGRIND_RESTORE_STACK \
3777*a9643ea8Slogwang "mov sp, r10\n\t"
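/* Note: "bic r4, r4, #7" clears the low three bits, i.e. it computes
   sp & ~7, rounding the stack pointer down to the next 8-byte boundary
   (e.g. 0x7fff1234 -> 0x7fff1230), while r10 keeps the original value
   for VALGRIND_RESTORE_STACK. */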
3778*a9643ea8Slogwang
3779*a9643ea8Slogwang /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3780*a9643ea8Slogwang long) == 4. */
3781*a9643ea8Slogwang
3782*a9643ea8Slogwang #define CALL_FN_W_v(lval, orig) \
3783*a9643ea8Slogwang do { \
3784*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3785*a9643ea8Slogwang volatile unsigned long _argvec[1]; \
3786*a9643ea8Slogwang volatile unsigned long _res; \
3787*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
3788*a9643ea8Slogwang __asm__ volatile( \
3789*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3790*a9643ea8Slogwang "ldr r4, [%1] \n\t" /* target->r4 */ \
3791*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3792*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3793*a9643ea8Slogwang "mov %0, r0\n" \
3794*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3795*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
3796*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3797*a9643ea8Slogwang ); \
3798*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3799*a9643ea8Slogwang } while (0)
3800*a9643ea8Slogwang
3801*a9643ea8Slogwang #define CALL_FN_W_W(lval, orig, arg1) \
3802*a9643ea8Slogwang do { \
3803*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3804*a9643ea8Slogwang volatile unsigned long _argvec[2]; \
3805*a9643ea8Slogwang volatile unsigned long _res; \
3806*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
3807*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
3808*a9643ea8Slogwang __asm__ volatile( \
3809*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3810*a9643ea8Slogwang "ldr r0, [%1, #4] \n\t" \
3811*a9643ea8Slogwang "ldr r4, [%1] \n\t" /* target->r4 */ \
3812*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3813*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3814*a9643ea8Slogwang "mov %0, r0\n" \
3815*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3816*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
3817*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3818*a9643ea8Slogwang ); \
3819*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3820*a9643ea8Slogwang } while (0)
3821*a9643ea8Slogwang
3822*a9643ea8Slogwang #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3823*a9643ea8Slogwang do { \
3824*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3825*a9643ea8Slogwang volatile unsigned long _argvec[3]; \
3826*a9643ea8Slogwang volatile unsigned long _res; \
3827*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
3828*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
3829*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
3830*a9643ea8Slogwang __asm__ volatile( \
3831*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3832*a9643ea8Slogwang "ldr r0, [%1, #4] \n\t" \
3833*a9643ea8Slogwang "ldr r1, [%1, #8] \n\t" \
3834*a9643ea8Slogwang "ldr r4, [%1] \n\t" /* target->r4 */ \
3835*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3836*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3837*a9643ea8Slogwang "mov %0, r0\n" \
3838*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3839*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
3840*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3841*a9643ea8Slogwang ); \
3842*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3843*a9643ea8Slogwang } while (0)
3844*a9643ea8Slogwang
3845*a9643ea8Slogwang #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3846*a9643ea8Slogwang do { \
3847*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3848*a9643ea8Slogwang volatile unsigned long _argvec[4]; \
3849*a9643ea8Slogwang volatile unsigned long _res; \
3850*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
3851*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
3852*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
3853*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
3854*a9643ea8Slogwang __asm__ volatile( \
3855*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3856*a9643ea8Slogwang "ldr r0, [%1, #4] \n\t" \
3857*a9643ea8Slogwang "ldr r1, [%1, #8] \n\t" \
3858*a9643ea8Slogwang "ldr r2, [%1, #12] \n\t" \
3859*a9643ea8Slogwang "ldr r4, [%1] \n\t" /* target->r4 */ \
3860*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3861*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3862*a9643ea8Slogwang "mov %0, r0\n" \
3863*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3864*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
3865*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3866*a9643ea8Slogwang ); \
3867*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3868*a9643ea8Slogwang } while (0)
3869*a9643ea8Slogwang
3870*a9643ea8Slogwang #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3871*a9643ea8Slogwang do { \
3872*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3873*a9643ea8Slogwang volatile unsigned long _argvec[5]; \
3874*a9643ea8Slogwang volatile unsigned long _res; \
3875*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
3876*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
3877*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
3878*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
3879*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
3880*a9643ea8Slogwang __asm__ volatile( \
3881*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3882*a9643ea8Slogwang "ldr r0, [%1, #4] \n\t" \
3883*a9643ea8Slogwang "ldr r1, [%1, #8] \n\t" \
3884*a9643ea8Slogwang "ldr r2, [%1, #12] \n\t" \
3885*a9643ea8Slogwang "ldr r3, [%1, #16] \n\t" \
3886*a9643ea8Slogwang "ldr r4, [%1] \n\t" /* target->r4 */ \
3887*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3888*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3889*a9643ea8Slogwang "mov %0, r0" \
3890*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3891*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
3892*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3893*a9643ea8Slogwang ); \
3894*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3895*a9643ea8Slogwang } while (0)
3896*a9643ea8Slogwang
3897*a9643ea8Slogwang #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3898*a9643ea8Slogwang do { \
3899*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3900*a9643ea8Slogwang volatile unsigned long _argvec[6]; \
3901*a9643ea8Slogwang volatile unsigned long _res; \
3902*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
3903*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
3904*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
3905*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
3906*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
3907*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
3908*a9643ea8Slogwang __asm__ volatile( \
3909*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3910*a9643ea8Slogwang "sub sp, sp, #4 \n\t" \
3911*a9643ea8Slogwang "ldr r0, [%1, #20] \n\t" \
3912*a9643ea8Slogwang "push {r0} \n\t" \
3913*a9643ea8Slogwang "ldr r0, [%1, #4] \n\t" \
3914*a9643ea8Slogwang "ldr r1, [%1, #8] \n\t" \
3915*a9643ea8Slogwang "ldr r2, [%1, #12] \n\t" \
3916*a9643ea8Slogwang "ldr r3, [%1, #16] \n\t" \
3917*a9643ea8Slogwang "ldr r4, [%1] \n\t" /* target->r4 */ \
3918*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3919*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3920*a9643ea8Slogwang "mov %0, r0" \
3921*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3922*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
3923*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3924*a9643ea8Slogwang ); \
3925*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3926*a9643ea8Slogwang } while (0)
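/* Note on the stack arguments: under the AAPCS the first four
   word-sized arguments travel in r0-r3 and the rest are passed on the
   stack, which is why the 5-argument variant above pushes arg5.  The
   extra "sub sp, sp, #4" keeps the total adjustment a multiple of 8
   (4 bytes + one 4-byte push), preserving the 8-byte stack alignment
   expected at a public call boundary; the variants below make the same
   trade-off whenever an odd number of words would otherwise be pushed. */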
3927*a9643ea8Slogwang
3928*a9643ea8Slogwang #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3929*a9643ea8Slogwang do { \
3930*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3931*a9643ea8Slogwang volatile unsigned long _argvec[7]; \
3932*a9643ea8Slogwang volatile unsigned long _res; \
3933*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
3934*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
3935*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
3936*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
3937*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
3938*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
3939*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
3940*a9643ea8Slogwang __asm__ volatile( \
3941*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3942*a9643ea8Slogwang "ldr r0, [%1, #20] \n\t" \
3943*a9643ea8Slogwang "ldr r1, [%1, #24] \n\t" \
3944*a9643ea8Slogwang "push {r0, r1} \n\t" \
3945*a9643ea8Slogwang "ldr r0, [%1, #4] \n\t" \
3946*a9643ea8Slogwang "ldr r1, [%1, #8] \n\t" \
3947*a9643ea8Slogwang "ldr r2, [%1, #12] \n\t" \
3948*a9643ea8Slogwang "ldr r3, [%1, #16] \n\t" \
3949*a9643ea8Slogwang "ldr r4, [%1] \n\t" /* target->r4 */ \
3950*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3951*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3952*a9643ea8Slogwang "mov %0, r0" \
3953*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3954*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
3955*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3956*a9643ea8Slogwang ); \
3957*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3958*a9643ea8Slogwang } while (0)
3959*a9643ea8Slogwang
3960*a9643ea8Slogwang #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3961*a9643ea8Slogwang arg7) \
3962*a9643ea8Slogwang do { \
3963*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
3964*a9643ea8Slogwang volatile unsigned long _argvec[8]; \
3965*a9643ea8Slogwang volatile unsigned long _res; \
3966*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
3967*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
3968*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
3969*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
3970*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
3971*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
3972*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
3973*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
3974*a9643ea8Slogwang __asm__ volatile( \
3975*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
3976*a9643ea8Slogwang "sub sp, sp, #4 \n\t" \
3977*a9643ea8Slogwang "ldr r0, [%1, #20] \n\t" \
3978*a9643ea8Slogwang "ldr r1, [%1, #24] \n\t" \
3979*a9643ea8Slogwang "ldr r2, [%1, #28] \n\t" \
3980*a9643ea8Slogwang "push {r0, r1, r2} \n\t" \
3981*a9643ea8Slogwang "ldr r0, [%1, #4] \n\t" \
3982*a9643ea8Slogwang "ldr r1, [%1, #8] \n\t" \
3983*a9643ea8Slogwang "ldr r2, [%1, #12] \n\t" \
3984*a9643ea8Slogwang "ldr r3, [%1, #16] \n\t" \
3985*a9643ea8Slogwang "ldr r4, [%1] \n\t" /* target->r4 */ \
3986*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3987*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
3988*a9643ea8Slogwang "mov %0, r0" \
3989*a9643ea8Slogwang : /*out*/ "=r" (_res) \
3990*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
3991*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3992*a9643ea8Slogwang ); \
3993*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
3994*a9643ea8Slogwang } while (0)
3995*a9643ea8Slogwang
3996*a9643ea8Slogwang #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3997*a9643ea8Slogwang arg7,arg8) \
3998*a9643ea8Slogwang do { \
3999*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4000*a9643ea8Slogwang volatile unsigned long _argvec[9]; \
4001*a9643ea8Slogwang volatile unsigned long _res; \
4002*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4003*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
4004*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
4005*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
4006*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
4007*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
4008*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
4009*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
4010*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
4011*a9643ea8Slogwang __asm__ volatile( \
4012*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
4013*a9643ea8Slogwang "ldr r0, [%1, #20] \n\t" \
4014*a9643ea8Slogwang "ldr r1, [%1, #24] \n\t" \
4015*a9643ea8Slogwang "ldr r2, [%1, #28] \n\t" \
4016*a9643ea8Slogwang "ldr r3, [%1, #32] \n\t" \
4017*a9643ea8Slogwang "push {r0, r1, r2, r3} \n\t" \
4018*a9643ea8Slogwang "ldr r0, [%1, #4] \n\t" \
4019*a9643ea8Slogwang "ldr r1, [%1, #8] \n\t" \
4020*a9643ea8Slogwang "ldr r2, [%1, #12] \n\t" \
4021*a9643ea8Slogwang "ldr r3, [%1, #16] \n\t" \
4022*a9643ea8Slogwang "ldr r4, [%1] \n\t" /* target->r4 */ \
4023*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4024*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
4025*a9643ea8Slogwang "mov %0, r0" \
4026*a9643ea8Slogwang : /*out*/ "=r" (_res) \
4027*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
4028*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4029*a9643ea8Slogwang ); \
4030*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4031*a9643ea8Slogwang } while (0)
4032*a9643ea8Slogwang
4033*a9643ea8Slogwang #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4034*a9643ea8Slogwang arg7,arg8,arg9) \
4035*a9643ea8Slogwang do { \
4036*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4037*a9643ea8Slogwang volatile unsigned long _argvec[10]; \
4038*a9643ea8Slogwang volatile unsigned long _res; \
4039*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4040*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
4041*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
4042*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
4043*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
4044*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
4045*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
4046*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
4047*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
4048*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
4049*a9643ea8Slogwang __asm__ volatile( \
4050*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
4051*a9643ea8Slogwang "sub sp, sp, #4 \n\t" \
4052*a9643ea8Slogwang "ldr r0, [%1, #20] \n\t" \
4053*a9643ea8Slogwang "ldr r1, [%1, #24] \n\t" \
4054*a9643ea8Slogwang "ldr r2, [%1, #28] \n\t" \
4055*a9643ea8Slogwang "ldr r3, [%1, #32] \n\t" \
4056*a9643ea8Slogwang "ldr r4, [%1, #36] \n\t" \
4057*a9643ea8Slogwang "push {r0, r1, r2, r3, r4} \n\t" \
4058*a9643ea8Slogwang "ldr r0, [%1, #4] \n\t" \
4059*a9643ea8Slogwang "ldr r1, [%1, #8] \n\t" \
4060*a9643ea8Slogwang "ldr r2, [%1, #12] \n\t" \
4061*a9643ea8Slogwang "ldr r3, [%1, #16] \n\t" \
4062*a9643ea8Slogwang "ldr r4, [%1] \n\t" /* target->r4 */ \
4063*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4064*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
4065*a9643ea8Slogwang "mov %0, r0" \
4066*a9643ea8Slogwang : /*out*/ "=r" (_res) \
4067*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
4068*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4069*a9643ea8Slogwang ); \
4070*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4071*a9643ea8Slogwang } while (0)
4072*a9643ea8Slogwang
4073*a9643ea8Slogwang #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4074*a9643ea8Slogwang arg7,arg8,arg9,arg10) \
4075*a9643ea8Slogwang do { \
4076*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4077*a9643ea8Slogwang volatile unsigned long _argvec[11]; \
4078*a9643ea8Slogwang volatile unsigned long _res; \
4079*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4080*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
4081*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
4082*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
4083*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
4084*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
4085*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
4086*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
4087*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
4088*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
4089*a9643ea8Slogwang _argvec[10] = (unsigned long)(arg10); \
4090*a9643ea8Slogwang __asm__ volatile( \
4091*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
4092*a9643ea8Slogwang "ldr r0, [%1, #40] \n\t" \
4093*a9643ea8Slogwang "push {r0} \n\t" \
4094*a9643ea8Slogwang "ldr r0, [%1, #20] \n\t" \
4095*a9643ea8Slogwang "ldr r1, [%1, #24] \n\t" \
4096*a9643ea8Slogwang "ldr r2, [%1, #28] \n\t" \
4097*a9643ea8Slogwang "ldr r3, [%1, #32] \n\t" \
4098*a9643ea8Slogwang "ldr r4, [%1, #36] \n\t" \
4099*a9643ea8Slogwang "push {r0, r1, r2, r3, r4} \n\t" \
4100*a9643ea8Slogwang "ldr r0, [%1, #4] \n\t" \
4101*a9643ea8Slogwang "ldr r1, [%1, #8] \n\t" \
4102*a9643ea8Slogwang "ldr r2, [%1, #12] \n\t" \
4103*a9643ea8Slogwang "ldr r3, [%1, #16] \n\t" \
4104*a9643ea8Slogwang "ldr r4, [%1] \n\t" /* target->r4 */ \
4105*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4106*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
4107*a9643ea8Slogwang "mov %0, r0" \
4108*a9643ea8Slogwang : /*out*/ "=r" (_res) \
4109*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
4110*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4111*a9643ea8Slogwang ); \
4112*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4113*a9643ea8Slogwang } while (0)
4114*a9643ea8Slogwang
4115*a9643ea8Slogwang #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4116*a9643ea8Slogwang arg6,arg7,arg8,arg9,arg10, \
4117*a9643ea8Slogwang arg11) \
4118*a9643ea8Slogwang do { \
4119*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4120*a9643ea8Slogwang volatile unsigned long _argvec[12]; \
4121*a9643ea8Slogwang volatile unsigned long _res; \
4122*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4123*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
4124*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
4125*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
4126*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
4127*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
4128*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
4129*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
4130*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
4131*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
4132*a9643ea8Slogwang _argvec[10] = (unsigned long)(arg10); \
4133*a9643ea8Slogwang _argvec[11] = (unsigned long)(arg11); \
4134*a9643ea8Slogwang __asm__ volatile( \
4135*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
4136*a9643ea8Slogwang "sub sp, sp, #4 \n\t" \
4137*a9643ea8Slogwang "ldr r0, [%1, #40] \n\t" \
4138*a9643ea8Slogwang "ldr r1, [%1, #44] \n\t" \
4139*a9643ea8Slogwang "push {r0, r1} \n\t" \
4140*a9643ea8Slogwang "ldr r0, [%1, #20] \n\t" \
4141*a9643ea8Slogwang "ldr r1, [%1, #24] \n\t" \
4142*a9643ea8Slogwang "ldr r2, [%1, #28] \n\t" \
4143*a9643ea8Slogwang "ldr r3, [%1, #32] \n\t" \
4144*a9643ea8Slogwang "ldr r4, [%1, #36] \n\t" \
4145*a9643ea8Slogwang "push {r0, r1, r2, r3, r4} \n\t" \
4146*a9643ea8Slogwang "ldr r0, [%1, #4] \n\t" \
4147*a9643ea8Slogwang "ldr r1, [%1, #8] \n\t" \
4148*a9643ea8Slogwang "ldr r2, [%1, #12] \n\t" \
4149*a9643ea8Slogwang "ldr r3, [%1, #16] \n\t" \
4150*a9643ea8Slogwang "ldr r4, [%1] \n\t" /* target->r4 */ \
4151*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4152*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
4153*a9643ea8Slogwang "mov %0, r0" \
4154*a9643ea8Slogwang : /*out*/ "=r" (_res) \
4155*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
4156*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4157*a9643ea8Slogwang ); \
4158*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4159*a9643ea8Slogwang } while (0)
4160*a9643ea8Slogwang
4161*a9643ea8Slogwang #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4162*a9643ea8Slogwang arg6,arg7,arg8,arg9,arg10, \
4163*a9643ea8Slogwang arg11,arg12) \
4164*a9643ea8Slogwang do { \
4165*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4166*a9643ea8Slogwang volatile unsigned long _argvec[13]; \
4167*a9643ea8Slogwang volatile unsigned long _res; \
4168*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4169*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
4170*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
4171*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
4172*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
4173*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
4174*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
4175*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
4176*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
4177*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
4178*a9643ea8Slogwang _argvec[10] = (unsigned long)(arg10); \
4179*a9643ea8Slogwang _argvec[11] = (unsigned long)(arg11); \
4180*a9643ea8Slogwang _argvec[12] = (unsigned long)(arg12); \
4181*a9643ea8Slogwang __asm__ volatile( \
4182*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
4183*a9643ea8Slogwang "ldr r0, [%1, #40] \n\t" \
4184*a9643ea8Slogwang "ldr r1, [%1, #44] \n\t" \
4185*a9643ea8Slogwang "ldr r2, [%1, #48] \n\t" \
4186*a9643ea8Slogwang "push {r0, r1, r2} \n\t" \
4187*a9643ea8Slogwang "ldr r0, [%1, #20] \n\t" \
4188*a9643ea8Slogwang "ldr r1, [%1, #24] \n\t" \
4189*a9643ea8Slogwang "ldr r2, [%1, #28] \n\t" \
4190*a9643ea8Slogwang "ldr r3, [%1, #32] \n\t" \
4191*a9643ea8Slogwang "ldr r4, [%1, #36] \n\t" \
4192*a9643ea8Slogwang "push {r0, r1, r2, r3, r4} \n\t" \
4193*a9643ea8Slogwang "ldr r0, [%1, #4] \n\t" \
4194*a9643ea8Slogwang "ldr r1, [%1, #8] \n\t" \
4195*a9643ea8Slogwang "ldr r2, [%1, #12] \n\t" \
4196*a9643ea8Slogwang "ldr r3, [%1, #16] \n\t" \
4197*a9643ea8Slogwang "ldr r4, [%1] \n\t" /* target->r4 */ \
4198*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4199*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
4200*a9643ea8Slogwang "mov %0, r0" \
4201*a9643ea8Slogwang : /*out*/ "=r" (_res) \
4202*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
4203*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4204*a9643ea8Slogwang ); \
4205*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4206*a9643ea8Slogwang } while (0)
4207*a9643ea8Slogwang
4208*a9643ea8Slogwang #endif /* PLAT_arm_linux */
4209*a9643ea8Slogwang
4210*a9643ea8Slogwang /* ------------------------ arm64-linux ------------------------ */
4211*a9643ea8Slogwang
4212*a9643ea8Slogwang #if defined(PLAT_arm64_linux)
4213*a9643ea8Slogwang
4214*a9643ea8Slogwang /* These regs are trashed by the hidden call. */
4215*a9643ea8Slogwang #define __CALLER_SAVED_REGS \
4216*a9643ea8Slogwang      "x0", "x1", "x2", "x3", "x4", "x5", "x6", "x7", "x8", "x9", \
4217*a9643ea8Slogwang "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", \
4218*a9643ea8Slogwang "x18", "x19", "x20", "x30", \
4219*a9643ea8Slogwang "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", \
4220*a9643ea8Slogwang "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", \
4221*a9643ea8Slogwang "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", \
4222*a9643ea8Slogwang "v26", "v27", "v28", "v29", "v30", "v31"
4223*a9643ea8Slogwang
4224*a9643ea8Slogwang /* x21 is callee-saved, so we can use it to save and restore SP around
4225*a9643ea8Slogwang the hidden call. */
4226*a9643ea8Slogwang #define VALGRIND_ALIGN_STACK \
4227*a9643ea8Slogwang "mov x21, sp\n\t" \
4228*a9643ea8Slogwang "bic sp, x21, #15\n\t"
4229*a9643ea8Slogwang #define VALGRIND_RESTORE_STACK \
4230*a9643ea8Slogwang "mov sp, x21\n\t"
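/* Note: "bic sp, x21, #15" computes x21 & ~15, rounding SP down to a
   16-byte boundary as AAPCS64 requires at a call, while x21 keeps the
   original value so VALGRIND_RESTORE_STACK can restore it. */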
4231*a9643ea8Slogwang
4232*a9643ea8Slogwang /* These CALL_FN_ macros assume that on arm64-linux,
4233*a9643ea8Slogwang sizeof(unsigned long) == 8. */
4234*a9643ea8Slogwang
4235*a9643ea8Slogwang #define CALL_FN_W_v(lval, orig) \
4236*a9643ea8Slogwang do { \
4237*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4238*a9643ea8Slogwang volatile unsigned long _argvec[1]; \
4239*a9643ea8Slogwang volatile unsigned long _res; \
4240*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4241*a9643ea8Slogwang __asm__ volatile( \
4242*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
4243*a9643ea8Slogwang "ldr x8, [%1] \n\t" /* target->x8 */ \
4244*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4245*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
4246*a9643ea8Slogwang "mov %0, x0\n" \
4247*a9643ea8Slogwang : /*out*/ "=r" (_res) \
4248*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
4249*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4250*a9643ea8Slogwang ); \
4251*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4252*a9643ea8Slogwang } while (0)
4253*a9643ea8Slogwang
4254*a9643ea8Slogwang #define CALL_FN_W_W(lval, orig, arg1) \
4255*a9643ea8Slogwang do { \
4256*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4257*a9643ea8Slogwang volatile unsigned long _argvec[2]; \
4258*a9643ea8Slogwang volatile unsigned long _res; \
4259*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4260*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
4261*a9643ea8Slogwang __asm__ volatile( \
4262*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
4263*a9643ea8Slogwang "ldr x0, [%1, #8] \n\t" \
4264*a9643ea8Slogwang "ldr x8, [%1] \n\t" /* target->x8 */ \
4265*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4266*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
4267*a9643ea8Slogwang "mov %0, x0\n" \
4268*a9643ea8Slogwang : /*out*/ "=r" (_res) \
4269*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
4270*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4271*a9643ea8Slogwang ); \
4272*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4273*a9643ea8Slogwang } while (0)
4274*a9643ea8Slogwang
4275*a9643ea8Slogwang #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
4276*a9643ea8Slogwang do { \
4277*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4278*a9643ea8Slogwang volatile unsigned long _argvec[3]; \
4279*a9643ea8Slogwang volatile unsigned long _res; \
4280*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4281*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
4282*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
4283*a9643ea8Slogwang __asm__ volatile( \
4284*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
4285*a9643ea8Slogwang "ldr x0, [%1, #8] \n\t" \
4286*a9643ea8Slogwang "ldr x1, [%1, #16] \n\t" \
4287*a9643ea8Slogwang "ldr x8, [%1] \n\t" /* target->x8 */ \
4288*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4289*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
4290*a9643ea8Slogwang "mov %0, x0\n" \
4291*a9643ea8Slogwang : /*out*/ "=r" (_res) \
4292*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
4293*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4294*a9643ea8Slogwang ); \
4295*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4296*a9643ea8Slogwang } while (0)
4297*a9643ea8Slogwang
4298*a9643ea8Slogwang #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4299*a9643ea8Slogwang do { \
4300*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4301*a9643ea8Slogwang volatile unsigned long _argvec[4]; \
4302*a9643ea8Slogwang volatile unsigned long _res; \
4303*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4304*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
4305*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
4306*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
4307*a9643ea8Slogwang __asm__ volatile( \
4308*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
4309*a9643ea8Slogwang "ldr x0, [%1, #8] \n\t" \
4310*a9643ea8Slogwang "ldr x1, [%1, #16] \n\t" \
4311*a9643ea8Slogwang "ldr x2, [%1, #24] \n\t" \
4312*a9643ea8Slogwang "ldr x8, [%1] \n\t" /* target->x8 */ \
4313*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4314*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
4315*a9643ea8Slogwang "mov %0, x0\n" \
4316*a9643ea8Slogwang : /*out*/ "=r" (_res) \
4317*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
4318*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4319*a9643ea8Slogwang ); \
4320*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4321*a9643ea8Slogwang } while (0)
4322*a9643ea8Slogwang
4323*a9643ea8Slogwang #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4324*a9643ea8Slogwang do { \
4325*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4326*a9643ea8Slogwang volatile unsigned long _argvec[5]; \
4327*a9643ea8Slogwang volatile unsigned long _res; \
4328*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4329*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
4330*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
4331*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
4332*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
4333*a9643ea8Slogwang __asm__ volatile( \
4334*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
4335*a9643ea8Slogwang "ldr x0, [%1, #8] \n\t" \
4336*a9643ea8Slogwang "ldr x1, [%1, #16] \n\t" \
4337*a9643ea8Slogwang "ldr x2, [%1, #24] \n\t" \
4338*a9643ea8Slogwang "ldr x3, [%1, #32] \n\t" \
4339*a9643ea8Slogwang "ldr x8, [%1] \n\t" /* target->x8 */ \
4340*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4341*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
4342*a9643ea8Slogwang "mov %0, x0" \
4343*a9643ea8Slogwang : /*out*/ "=r" (_res) \
4344*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
4345*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4346*a9643ea8Slogwang ); \
4347*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4348*a9643ea8Slogwang } while (0)
4349*a9643ea8Slogwang
4350*a9643ea8Slogwang #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4351*a9643ea8Slogwang do { \
4352*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4353*a9643ea8Slogwang volatile unsigned long _argvec[6]; \
4354*a9643ea8Slogwang volatile unsigned long _res; \
4355*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4356*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
4357*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
4358*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
4359*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
4360*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
4361*a9643ea8Slogwang __asm__ volatile( \
4362*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
4363*a9643ea8Slogwang "ldr x0, [%1, #8] \n\t" \
4364*a9643ea8Slogwang "ldr x1, [%1, #16] \n\t" \
4365*a9643ea8Slogwang "ldr x2, [%1, #24] \n\t" \
4366*a9643ea8Slogwang "ldr x3, [%1, #32] \n\t" \
4367*a9643ea8Slogwang "ldr x4, [%1, #40] \n\t" \
4368*a9643ea8Slogwang "ldr x8, [%1] \n\t" /* target->x8 */ \
4369*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4370*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
4371*a9643ea8Slogwang "mov %0, x0" \
4372*a9643ea8Slogwang : /*out*/ "=r" (_res) \
4373*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
4374*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4375*a9643ea8Slogwang ); \
4376*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4377*a9643ea8Slogwang } while (0)
4378*a9643ea8Slogwang
4379*a9643ea8Slogwang #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4380*a9643ea8Slogwang do { \
4381*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4382*a9643ea8Slogwang volatile unsigned long _argvec[7]; \
4383*a9643ea8Slogwang volatile unsigned long _res; \
4384*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4385*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
4386*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
4387*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
4388*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
4389*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
4390*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
4391*a9643ea8Slogwang __asm__ volatile( \
4392*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
4393*a9643ea8Slogwang "ldr x0, [%1, #8] \n\t" \
4394*a9643ea8Slogwang "ldr x1, [%1, #16] \n\t" \
4395*a9643ea8Slogwang "ldr x2, [%1, #24] \n\t" \
4396*a9643ea8Slogwang "ldr x3, [%1, #32] \n\t" \
4397*a9643ea8Slogwang "ldr x4, [%1, #40] \n\t" \
4398*a9643ea8Slogwang "ldr x5, [%1, #48] \n\t" \
4399*a9643ea8Slogwang "ldr x8, [%1] \n\t" /* target->x8 */ \
4400*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4401*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
4402*a9643ea8Slogwang "mov %0, x0" \
4403*a9643ea8Slogwang : /*out*/ "=r" (_res) \
4404*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
4405*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4406*a9643ea8Slogwang ); \
4407*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4408*a9643ea8Slogwang } while (0)
4409*a9643ea8Slogwang
4410*a9643ea8Slogwang #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4411*a9643ea8Slogwang arg7) \
4412*a9643ea8Slogwang do { \
4413*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4414*a9643ea8Slogwang volatile unsigned long _argvec[8]; \
4415*a9643ea8Slogwang volatile unsigned long _res; \
4416*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4417*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
4418*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
4419*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
4420*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
4421*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
4422*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
4423*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
4424*a9643ea8Slogwang __asm__ volatile( \
4425*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
4426*a9643ea8Slogwang "ldr x0, [%1, #8] \n\t" \
4427*a9643ea8Slogwang "ldr x1, [%1, #16] \n\t" \
4428*a9643ea8Slogwang "ldr x2, [%1, #24] \n\t" \
4429*a9643ea8Slogwang "ldr x3, [%1, #32] \n\t" \
4430*a9643ea8Slogwang "ldr x4, [%1, #40] \n\t" \
4431*a9643ea8Slogwang "ldr x5, [%1, #48] \n\t" \
4432*a9643ea8Slogwang "ldr x6, [%1, #56] \n\t" \
4433*a9643ea8Slogwang "ldr x8, [%1] \n\t" /* target->x8 */ \
4434*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4435*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
4436*a9643ea8Slogwang "mov %0, x0" \
4437*a9643ea8Slogwang : /*out*/ "=r" (_res) \
4438*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
4439*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4440*a9643ea8Slogwang ); \
4441*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4442*a9643ea8Slogwang } while (0)
4443*a9643ea8Slogwang
4444*a9643ea8Slogwang #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4445*a9643ea8Slogwang arg7,arg8) \
4446*a9643ea8Slogwang do { \
4447*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4448*a9643ea8Slogwang volatile unsigned long _argvec[9]; \
4449*a9643ea8Slogwang volatile unsigned long _res; \
4450*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4451*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
4452*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
4453*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
4454*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
4455*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
4456*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
4457*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
4458*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
4459*a9643ea8Slogwang __asm__ volatile( \
4460*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
4461*a9643ea8Slogwang "ldr x0, [%1, #8] \n\t" \
4462*a9643ea8Slogwang "ldr x1, [%1, #16] \n\t" \
4463*a9643ea8Slogwang "ldr x2, [%1, #24] \n\t" \
4464*a9643ea8Slogwang "ldr x3, [%1, #32] \n\t" \
4465*a9643ea8Slogwang "ldr x4, [%1, #40] \n\t" \
4466*a9643ea8Slogwang "ldr x5, [%1, #48] \n\t" \
4467*a9643ea8Slogwang "ldr x6, [%1, #56] \n\t" \
4468*a9643ea8Slogwang "ldr x7, [%1, #64] \n\t" \
4469*a9643ea8Slogwang "ldr x8, [%1] \n\t" /* target->x8 */ \
4470*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4471*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
4472*a9643ea8Slogwang "mov %0, x0" \
4473*a9643ea8Slogwang : /*out*/ "=r" (_res) \
4474*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
4475*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4476*a9643ea8Slogwang ); \
4477*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4478*a9643ea8Slogwang } while (0)
4479*a9643ea8Slogwang
4480*a9643ea8Slogwang #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4481*a9643ea8Slogwang arg7,arg8,arg9) \
4482*a9643ea8Slogwang do { \
4483*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4484*a9643ea8Slogwang volatile unsigned long _argvec[10]; \
4485*a9643ea8Slogwang volatile unsigned long _res; \
4486*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4487*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
4488*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
4489*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
4490*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
4491*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
4492*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
4493*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
4494*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
4495*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
4496*a9643ea8Slogwang __asm__ volatile( \
4497*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
4498*a9643ea8Slogwang "sub sp, sp, #0x20 \n\t" \
4499*a9643ea8Slogwang "ldr x0, [%1, #8] \n\t" \
4500*a9643ea8Slogwang "ldr x1, [%1, #16] \n\t" \
4501*a9643ea8Slogwang "ldr x2, [%1, #24] \n\t" \
4502*a9643ea8Slogwang "ldr x3, [%1, #32] \n\t" \
4503*a9643ea8Slogwang "ldr x4, [%1, #40] \n\t" \
4504*a9643ea8Slogwang "ldr x5, [%1, #48] \n\t" \
4505*a9643ea8Slogwang "ldr x6, [%1, #56] \n\t" \
4506*a9643ea8Slogwang "ldr x7, [%1, #64] \n\t" \
4507*a9643ea8Slogwang "ldr x8, [%1, #72] \n\t" \
4508*a9643ea8Slogwang "str x8, [sp, #0] \n\t" \
4509*a9643ea8Slogwang "ldr x8, [%1] \n\t" /* target->x8 */ \
4510*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4511*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
4512*a9643ea8Slogwang "mov %0, x0" \
4513*a9643ea8Slogwang : /*out*/ "=r" (_res) \
4514*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
4515*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4516*a9643ea8Slogwang ); \
4517*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4518*a9643ea8Slogwang } while (0)
4519*a9643ea8Slogwang
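/* Note on the 9-argument and larger variants (above and below): AAPCS64
   passes only the first eight integer arguments in x0-x7, so arg9 and
   beyond must be stored to the stack before the hidden call.  The
   "sub sp, sp, #0x20" / "#0x30" adjustments reserve the outgoing argument
   area in multiples of 16 bytes, keeping sp 16-byte aligned as the ABI
   requires, and the subsequent "str x8, [sp, #0]", "[sp, #8]", ... stores
   place arg9, arg10, ... where the callee expects to find them. */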
4520*a9643ea8Slogwang #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4521*a9643ea8Slogwang arg7,arg8,arg9,arg10) \
4522*a9643ea8Slogwang do { \
4523*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4524*a9643ea8Slogwang volatile unsigned long _argvec[11]; \
4525*a9643ea8Slogwang volatile unsigned long _res; \
4526*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4527*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
4528*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
4529*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
4530*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
4531*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
4532*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
4533*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
4534*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
4535*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
4536*a9643ea8Slogwang _argvec[10] = (unsigned long)(arg10); \
4537*a9643ea8Slogwang __asm__ volatile( \
4538*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
4539*a9643ea8Slogwang "sub sp, sp, #0x20 \n\t" \
4540*a9643ea8Slogwang "ldr x0, [%1, #8] \n\t" \
4541*a9643ea8Slogwang "ldr x1, [%1, #16] \n\t" \
4542*a9643ea8Slogwang "ldr x2, [%1, #24] \n\t" \
4543*a9643ea8Slogwang "ldr x3, [%1, #32] \n\t" \
4544*a9643ea8Slogwang "ldr x4, [%1, #40] \n\t" \
4545*a9643ea8Slogwang "ldr x5, [%1, #48] \n\t" \
4546*a9643ea8Slogwang "ldr x6, [%1, #56] \n\t" \
4547*a9643ea8Slogwang "ldr x7, [%1, #64] \n\t" \
4548*a9643ea8Slogwang "ldr x8, [%1, #72] \n\t" \
4549*a9643ea8Slogwang "str x8, [sp, #0] \n\t" \
4550*a9643ea8Slogwang "ldr x8, [%1, #80] \n\t" \
4551*a9643ea8Slogwang "str x8, [sp, #8] \n\t" \
4552*a9643ea8Slogwang "ldr x8, [%1] \n\t" /* target->x8 */ \
4553*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4554*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
4555*a9643ea8Slogwang "mov %0, x0" \
4556*a9643ea8Slogwang : /*out*/ "=r" (_res) \
4557*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
4558*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4559*a9643ea8Slogwang ); \
4560*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4561*a9643ea8Slogwang } while (0)
4562*a9643ea8Slogwang
4563*a9643ea8Slogwang #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4564*a9643ea8Slogwang arg7,arg8,arg9,arg10,arg11) \
4565*a9643ea8Slogwang do { \
4566*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4567*a9643ea8Slogwang volatile unsigned long _argvec[12]; \
4568*a9643ea8Slogwang volatile unsigned long _res; \
4569*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4570*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
4571*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
4572*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
4573*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
4574*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
4575*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
4576*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
4577*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
4578*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
4579*a9643ea8Slogwang _argvec[10] = (unsigned long)(arg10); \
4580*a9643ea8Slogwang _argvec[11] = (unsigned long)(arg11); \
4581*a9643ea8Slogwang __asm__ volatile( \
4582*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
4583*a9643ea8Slogwang "sub sp, sp, #0x30 \n\t" \
4584*a9643ea8Slogwang "ldr x0, [%1, #8] \n\t" \
4585*a9643ea8Slogwang "ldr x1, [%1, #16] \n\t" \
4586*a9643ea8Slogwang "ldr x2, [%1, #24] \n\t" \
4587*a9643ea8Slogwang "ldr x3, [%1, #32] \n\t" \
4588*a9643ea8Slogwang "ldr x4, [%1, #40] \n\t" \
4589*a9643ea8Slogwang "ldr x5, [%1, #48] \n\t" \
4590*a9643ea8Slogwang "ldr x6, [%1, #56] \n\t" \
4591*a9643ea8Slogwang "ldr x7, [%1, #64] \n\t" \
4592*a9643ea8Slogwang "ldr x8, [%1, #72] \n\t" \
4593*a9643ea8Slogwang "str x8, [sp, #0] \n\t" \
4594*a9643ea8Slogwang "ldr x8, [%1, #80] \n\t" \
4595*a9643ea8Slogwang "str x8, [sp, #8] \n\t" \
4596*a9643ea8Slogwang "ldr x8, [%1, #88] \n\t" \
4597*a9643ea8Slogwang "str x8, [sp, #16] \n\t" \
4598*a9643ea8Slogwang "ldr x8, [%1] \n\t" /* target->x8 */ \
4599*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4600*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
4601*a9643ea8Slogwang "mov %0, x0" \
4602*a9643ea8Slogwang : /*out*/ "=r" (_res) \
4603*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
4604*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4605*a9643ea8Slogwang ); \
4606*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4607*a9643ea8Slogwang } while (0)
4608*a9643ea8Slogwang
4609*a9643ea8Slogwang #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4610*a9643ea8Slogwang arg7,arg8,arg9,arg10,arg11, \
4611*a9643ea8Slogwang arg12) \
4612*a9643ea8Slogwang do { \
4613*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4614*a9643ea8Slogwang volatile unsigned long _argvec[13]; \
4615*a9643ea8Slogwang volatile unsigned long _res; \
4616*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4617*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
4618*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
4619*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
4620*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
4621*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
4622*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
4623*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
4624*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
4625*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
4626*a9643ea8Slogwang _argvec[10] = (unsigned long)(arg10); \
4627*a9643ea8Slogwang _argvec[11] = (unsigned long)(arg11); \
4628*a9643ea8Slogwang _argvec[12] = (unsigned long)(arg12); \
4629*a9643ea8Slogwang __asm__ volatile( \
4630*a9643ea8Slogwang VALGRIND_ALIGN_STACK \
4631*a9643ea8Slogwang "sub sp, sp, #0x30 \n\t" \
4632*a9643ea8Slogwang "ldr x0, [%1, #8] \n\t" \
4633*a9643ea8Slogwang "ldr x1, [%1, #16] \n\t" \
4634*a9643ea8Slogwang "ldr x2, [%1, #24] \n\t" \
4635*a9643ea8Slogwang "ldr x3, [%1, #32] \n\t" \
4636*a9643ea8Slogwang "ldr x4, [%1, #40] \n\t" \
4637*a9643ea8Slogwang "ldr x5, [%1, #48] \n\t" \
4638*a9643ea8Slogwang "ldr x6, [%1, #56] \n\t" \
4639*a9643ea8Slogwang "ldr x7, [%1, #64] \n\t" \
4640*a9643ea8Slogwang "ldr x8, [%1, #72] \n\t" \
4641*a9643ea8Slogwang "str x8, [sp, #0] \n\t" \
4642*a9643ea8Slogwang "ldr x8, [%1, #80] \n\t" \
4643*a9643ea8Slogwang "str x8, [sp, #8] \n\t" \
4644*a9643ea8Slogwang "ldr x8, [%1, #88] \n\t" \
4645*a9643ea8Slogwang "str x8, [sp, #16] \n\t" \
4646*a9643ea8Slogwang "ldr x8, [%1, #96] \n\t" \
4647*a9643ea8Slogwang "str x8, [sp, #24] \n\t" \
4648*a9643ea8Slogwang "ldr x8, [%1] \n\t" /* target->x8 */ \
4649*a9643ea8Slogwang VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4650*a9643ea8Slogwang VALGRIND_RESTORE_STACK \
4651*a9643ea8Slogwang "mov %0, x0" \
4652*a9643ea8Slogwang : /*out*/ "=r" (_res) \
4653*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
4654*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4655*a9643ea8Slogwang ); \
4656*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4657*a9643ea8Slogwang } while (0)
4658*a9643ea8Slogwang
4659*a9643ea8Slogwang #endif /* PLAT_arm64_linux */
4660*a9643ea8Slogwang
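/* Usage sketch (illustrative only, not part of this header): the
   CALL_FN_W_* families above and below are normally invoked from a
   function wrapper built with the wrapping macros defined elsewhere in
   this header.  A minimal example, assuming a hypothetical function
   "int foo(int x, int y)" in the main executable:

      #include "valgrind.h"

      int I_WRAP_SONAME_FNNAME_ZU(NONE, foo)(int x, int y)
      {
         int    result;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);        // fetch the address of the real foo
         CALL_FN_W_WW(result, fn, x, y);  // call it without re-redirection
         return result;
      }

   The wrapper runs in place of foo when the program executes under
   Valgrind; CALL_FN_W_WW invokes the original foo through the no-redirect
   branch so the wrapper is not re-entered. */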
4661*a9643ea8Slogwang /* ------------------------- s390x-linux ------------------------- */
4662*a9643ea8Slogwang
4663*a9643ea8Slogwang #if defined(PLAT_s390x_linux)
4664*a9643ea8Slogwang
4665*a9643ea8Slogwang /* Similar workaround to the amd64 one (see above), but here r11 is used as
4666*a9643ea8Slogwang the frame pointer and the old r11 is saved in r7. Since r11 might be
4667*a9643ea8Slogwang used for argvec, the argvec pointer is copied into r1 first; r1 is
4668*a9643ea8Slogwang clobbered by the call anyway. */
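/* In rough pseudo-code, the prologue/epilogue pair defined just below does:

      r1  = argvec;   // "lgr 1,%1"  - the argvec pointer always travels in r1
      r7  = r11;      // "lgr 7,11"  - save the old frame pointer
      r11 = CFA;      // "lgr 11,%2" - %2 is __builtin_dwarf_cfa()
      ... the hidden call ...
      r11 = r7;       // "lgr 11,7"  - restore the old frame pointer

   together with .cfi_remember_state / .cfi_def_cfa / .cfi_restore_state so
   that unwinding through the hidden call still works.  This is only a
   sketch; the authoritative sequence is the asm in the macros below. */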
4669*a9643ea8Slogwang #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
4670*a9643ea8Slogwang # define __FRAME_POINTER \
4671*a9643ea8Slogwang ,"d"(__builtin_dwarf_cfa())
4672*a9643ea8Slogwang # define VALGRIND_CFI_PROLOGUE \
4673*a9643ea8Slogwang ".cfi_remember_state\n\t" \
4674*a9643ea8Slogwang "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
4675*a9643ea8Slogwang "lgr 7,11\n\t" \
4676*a9643ea8Slogwang "lgr 11,%2\n\t" \
4677*a9643ea8Slogwang ".cfi_def_cfa r11, 0\n\t"
4678*a9643ea8Slogwang # define VALGRIND_CFI_EPILOGUE \
4679*a9643ea8Slogwang "lgr 11, 7\n\t" \
4680*a9643ea8Slogwang ".cfi_restore_state\n\t"
4681*a9643ea8Slogwang #else
4682*a9643ea8Slogwang # define __FRAME_POINTER
4683*a9643ea8Slogwang # define VALGRIND_CFI_PROLOGUE \
4684*a9643ea8Slogwang "lgr 1,%1\n\t"
4685*a9643ea8Slogwang # define VALGRIND_CFI_EPILOGUE
4686*a9643ea8Slogwang #endif
4687*a9643ea8Slogwang
4688*a9643ea8Slogwang /* Nb: On s390 the stack pointer is properly aligned *at all times*
4689*a9643ea8Slogwang according to the s390 GCC maintainer. (The ABI specification is not
4690*a9643ea8Slogwang precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4691*a9643ea8Slogwang VALGRIND_RESTORE_STACK are not defined here. */
4692*a9643ea8Slogwang
4693*a9643ea8Slogwang /* These regs are trashed by the hidden call. Note that we overwrite
4694*a9643ea8Slogwang r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4695*a9643ea8Slogwang function a proper return address. All others are ABI defined call
4696*a9643ea8Slogwang clobbers. */
4697*a9643ea8Slogwang #define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
4698*a9643ea8Slogwang "f0","f1","f2","f3","f4","f5","f6","f7"
4699*a9643ea8Slogwang
4700*a9643ea8Slogwang /* Nb: Although r11 is modified in the asm snippets below (inside
4701*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE), it is not listed in the clobber section, for
4702*a9643ea8Slogwang two reasons:
4703*a9643ea8Slogwang (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4704*a9643ea8Slogwang modified;
4705*a9643ea8Slogwang (2) when compiling with -O -fno-omit-frame-pointer, GCC complains that
4706*a9643ea8Slogwang r11 (the frame pointer) cannot appear in a clobber list.
4707*a9643ea8Slogwang */
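/* A minimal illustration of reason (2), assuming GCC targeting s390x with
   -O -fno-omit-frame-pointer (this snippet is not part of the header and
   the function name is arbitrary):

      void probe(void)
      {
         // rejected: in this mode r11 is the frame pointer, and GCC
         // refuses an asm statement that lists the frame pointer as
         // clobbered
         __asm__ volatile("" : : : "11");
      }

   Saving r11 in r7 and restoring it by hand, as VALGRIND_CFI_PROLOGUE and
   VALGRIND_CFI_EPILOGUE do, avoids having to list it at all. */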
4708*a9643ea8Slogwang
4709*a9643ea8Slogwang #define CALL_FN_W_v(lval, orig) \
4710*a9643ea8Slogwang do { \
4711*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4712*a9643ea8Slogwang volatile unsigned long _argvec[1]; \
4713*a9643ea8Slogwang volatile unsigned long _res; \
4714*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4715*a9643ea8Slogwang __asm__ volatile( \
4716*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
4717*a9643ea8Slogwang "aghi 15,-160\n\t" \
4718*a9643ea8Slogwang "lg 1, 0(1)\n\t" /* target->r1 */ \
4719*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_R1 \
4720*a9643ea8Slogwang "lgr %0, 2\n\t" \
4721*a9643ea8Slogwang "aghi 15,160\n\t" \
4722*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
4723*a9643ea8Slogwang : /*out*/ "=d" (_res) \
4724*a9643ea8Slogwang : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
4725*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4726*a9643ea8Slogwang ); \
4727*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4728*a9643ea8Slogwang } while (0)
4729*a9643ea8Slogwang
4730*a9643ea8Slogwang /* The s390x ABI passes the first five arguments in r2-r6; the rest go on the stack. */
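/* Worked example of the layout used below, taking the seven-argument case
   (CALL_FN_W_7W): _argvec[0] holds the target address and is loaded into
   r1 last; _argvec[1..5] are loaded into r2-r6; _argvec[6] and _argvec[7]
   are copied with "mvc" into the stack slots at 160(15) and 168(15), i.e.
   just above the 160-byte register save area reserved by "aghi 15,-176".
   The callee's result comes back in r2 and is copied out via "lgr %0, 2". */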
4731*a9643ea8Slogwang #define CALL_FN_W_W(lval, orig, arg1) \
4732*a9643ea8Slogwang do { \
4733*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4734*a9643ea8Slogwang volatile unsigned long _argvec[2]; \
4735*a9643ea8Slogwang volatile unsigned long _res; \
4736*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4737*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
4738*a9643ea8Slogwang __asm__ volatile( \
4739*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
4740*a9643ea8Slogwang "aghi 15,-160\n\t" \
4741*a9643ea8Slogwang "lg 2, 8(1)\n\t" \
4742*a9643ea8Slogwang "lg 1, 0(1)\n\t" \
4743*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_R1 \
4744*a9643ea8Slogwang "lgr %0, 2\n\t" \
4745*a9643ea8Slogwang "aghi 15,160\n\t" \
4746*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
4747*a9643ea8Slogwang : /*out*/ "=d" (_res) \
4748*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4749*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4750*a9643ea8Slogwang ); \
4751*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4752*a9643ea8Slogwang } while (0)
4753*a9643ea8Slogwang
4754*a9643ea8Slogwang #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
4755*a9643ea8Slogwang do { \
4756*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4757*a9643ea8Slogwang volatile unsigned long _argvec[3]; \
4758*a9643ea8Slogwang volatile unsigned long _res; \
4759*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4760*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
4761*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
4762*a9643ea8Slogwang __asm__ volatile( \
4763*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
4764*a9643ea8Slogwang "aghi 15,-160\n\t" \
4765*a9643ea8Slogwang "lg 2, 8(1)\n\t" \
4766*a9643ea8Slogwang "lg 3,16(1)\n\t" \
4767*a9643ea8Slogwang "lg 1, 0(1)\n\t" \
4768*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_R1 \
4769*a9643ea8Slogwang "lgr %0, 2\n\t" \
4770*a9643ea8Slogwang "aghi 15,160\n\t" \
4771*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
4772*a9643ea8Slogwang : /*out*/ "=d" (_res) \
4773*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4774*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4775*a9643ea8Slogwang ); \
4776*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4777*a9643ea8Slogwang } while (0)
4778*a9643ea8Slogwang
4779*a9643ea8Slogwang #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
4780*a9643ea8Slogwang do { \
4781*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4782*a9643ea8Slogwang volatile unsigned long _argvec[4]; \
4783*a9643ea8Slogwang volatile unsigned long _res; \
4784*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4785*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
4786*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
4787*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
4788*a9643ea8Slogwang __asm__ volatile( \
4789*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
4790*a9643ea8Slogwang "aghi 15,-160\n\t" \
4791*a9643ea8Slogwang "lg 2, 8(1)\n\t" \
4792*a9643ea8Slogwang "lg 3,16(1)\n\t" \
4793*a9643ea8Slogwang "lg 4,24(1)\n\t" \
4794*a9643ea8Slogwang "lg 1, 0(1)\n\t" \
4795*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_R1 \
4796*a9643ea8Slogwang "lgr %0, 2\n\t" \
4797*a9643ea8Slogwang "aghi 15,160\n\t" \
4798*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
4799*a9643ea8Slogwang : /*out*/ "=d" (_res) \
4800*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4801*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4802*a9643ea8Slogwang ); \
4803*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4804*a9643ea8Slogwang } while (0)
4805*a9643ea8Slogwang
4806*a9643ea8Slogwang #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
4807*a9643ea8Slogwang do { \
4808*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4809*a9643ea8Slogwang volatile unsigned long _argvec[5]; \
4810*a9643ea8Slogwang volatile unsigned long _res; \
4811*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4812*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
4813*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
4814*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
4815*a9643ea8Slogwang _argvec[4] = (unsigned long)arg4; \
4816*a9643ea8Slogwang __asm__ volatile( \
4817*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
4818*a9643ea8Slogwang "aghi 15,-160\n\t" \
4819*a9643ea8Slogwang "lg 2, 8(1)\n\t" \
4820*a9643ea8Slogwang "lg 3,16(1)\n\t" \
4821*a9643ea8Slogwang "lg 4,24(1)\n\t" \
4822*a9643ea8Slogwang "lg 5,32(1)\n\t" \
4823*a9643ea8Slogwang "lg 1, 0(1)\n\t" \
4824*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_R1 \
4825*a9643ea8Slogwang "lgr %0, 2\n\t" \
4826*a9643ea8Slogwang "aghi 15,160\n\t" \
4827*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
4828*a9643ea8Slogwang : /*out*/ "=d" (_res) \
4829*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4830*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4831*a9643ea8Slogwang ); \
4832*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4833*a9643ea8Slogwang } while (0)
4834*a9643ea8Slogwang
4835*a9643ea8Slogwang #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
4836*a9643ea8Slogwang do { \
4837*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4838*a9643ea8Slogwang volatile unsigned long _argvec[6]; \
4839*a9643ea8Slogwang volatile unsigned long _res; \
4840*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4841*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
4842*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
4843*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
4844*a9643ea8Slogwang _argvec[4] = (unsigned long)arg4; \
4845*a9643ea8Slogwang _argvec[5] = (unsigned long)arg5; \
4846*a9643ea8Slogwang __asm__ volatile( \
4847*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
4848*a9643ea8Slogwang "aghi 15,-160\n\t" \
4849*a9643ea8Slogwang "lg 2, 8(1)\n\t" \
4850*a9643ea8Slogwang "lg 3,16(1)\n\t" \
4851*a9643ea8Slogwang "lg 4,24(1)\n\t" \
4852*a9643ea8Slogwang "lg 5,32(1)\n\t" \
4853*a9643ea8Slogwang "lg 6,40(1)\n\t" \
4854*a9643ea8Slogwang "lg 1, 0(1)\n\t" \
4855*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_R1 \
4856*a9643ea8Slogwang "lgr %0, 2\n\t" \
4857*a9643ea8Slogwang "aghi 15,160\n\t" \
4858*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
4859*a9643ea8Slogwang : /*out*/ "=d" (_res) \
4860*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4861*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4862*a9643ea8Slogwang ); \
4863*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4864*a9643ea8Slogwang } while (0)
4865*a9643ea8Slogwang
4866*a9643ea8Slogwang #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4867*a9643ea8Slogwang arg6) \
4868*a9643ea8Slogwang do { \
4869*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4870*a9643ea8Slogwang volatile unsigned long _argvec[7]; \
4871*a9643ea8Slogwang volatile unsigned long _res; \
4872*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4873*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
4874*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
4875*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
4876*a9643ea8Slogwang _argvec[4] = (unsigned long)arg4; \
4877*a9643ea8Slogwang _argvec[5] = (unsigned long)arg5; \
4878*a9643ea8Slogwang _argvec[6] = (unsigned long)arg6; \
4879*a9643ea8Slogwang __asm__ volatile( \
4880*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
4881*a9643ea8Slogwang "aghi 15,-168\n\t" \
4882*a9643ea8Slogwang "lg 2, 8(1)\n\t" \
4883*a9643ea8Slogwang "lg 3,16(1)\n\t" \
4884*a9643ea8Slogwang "lg 4,24(1)\n\t" \
4885*a9643ea8Slogwang "lg 5,32(1)\n\t" \
4886*a9643ea8Slogwang "lg 6,40(1)\n\t" \
4887*a9643ea8Slogwang "mvc 160(8,15), 48(1)\n\t" \
4888*a9643ea8Slogwang "lg 1, 0(1)\n\t" \
4889*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_R1 \
4890*a9643ea8Slogwang "lgr %0, 2\n\t" \
4891*a9643ea8Slogwang "aghi 15,168\n\t" \
4892*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
4893*a9643ea8Slogwang : /*out*/ "=d" (_res) \
4894*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4895*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4896*a9643ea8Slogwang ); \
4897*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4898*a9643ea8Slogwang } while (0)
4899*a9643ea8Slogwang
4900*a9643ea8Slogwang #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4901*a9643ea8Slogwang arg6, arg7) \
4902*a9643ea8Slogwang do { \
4903*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4904*a9643ea8Slogwang volatile unsigned long _argvec[8]; \
4905*a9643ea8Slogwang volatile unsigned long _res; \
4906*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4907*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
4908*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
4909*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
4910*a9643ea8Slogwang _argvec[4] = (unsigned long)arg4; \
4911*a9643ea8Slogwang _argvec[5] = (unsigned long)arg5; \
4912*a9643ea8Slogwang _argvec[6] = (unsigned long)arg6; \
4913*a9643ea8Slogwang _argvec[7] = (unsigned long)arg7; \
4914*a9643ea8Slogwang __asm__ volatile( \
4915*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
4916*a9643ea8Slogwang "aghi 15,-176\n\t" \
4917*a9643ea8Slogwang "lg 2, 8(1)\n\t" \
4918*a9643ea8Slogwang "lg 3,16(1)\n\t" \
4919*a9643ea8Slogwang "lg 4,24(1)\n\t" \
4920*a9643ea8Slogwang "lg 5,32(1)\n\t" \
4921*a9643ea8Slogwang "lg 6,40(1)\n\t" \
4922*a9643ea8Slogwang "mvc 160(8,15), 48(1)\n\t" \
4923*a9643ea8Slogwang "mvc 168(8,15), 56(1)\n\t" \
4924*a9643ea8Slogwang "lg 1, 0(1)\n\t" \
4925*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_R1 \
4926*a9643ea8Slogwang "lgr %0, 2\n\t" \
4927*a9643ea8Slogwang "aghi 15,176\n\t" \
4928*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
4929*a9643ea8Slogwang : /*out*/ "=d" (_res) \
4930*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4931*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4932*a9643ea8Slogwang ); \
4933*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4934*a9643ea8Slogwang } while (0)
4935*a9643ea8Slogwang
4936*a9643ea8Slogwang #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4937*a9643ea8Slogwang arg6, arg7 ,arg8) \
4938*a9643ea8Slogwang do { \
4939*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4940*a9643ea8Slogwang volatile unsigned long _argvec[9]; \
4941*a9643ea8Slogwang volatile unsigned long _res; \
4942*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4943*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
4944*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
4945*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
4946*a9643ea8Slogwang _argvec[4] = (unsigned long)arg4; \
4947*a9643ea8Slogwang _argvec[5] = (unsigned long)arg5; \
4948*a9643ea8Slogwang _argvec[6] = (unsigned long)arg6; \
4949*a9643ea8Slogwang _argvec[7] = (unsigned long)arg7; \
4950*a9643ea8Slogwang _argvec[8] = (unsigned long)arg8; \
4951*a9643ea8Slogwang __asm__ volatile( \
4952*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
4953*a9643ea8Slogwang "aghi 15,-184\n\t" \
4954*a9643ea8Slogwang "lg 2, 8(1)\n\t" \
4955*a9643ea8Slogwang "lg 3,16(1)\n\t" \
4956*a9643ea8Slogwang "lg 4,24(1)\n\t" \
4957*a9643ea8Slogwang "lg 5,32(1)\n\t" \
4958*a9643ea8Slogwang "lg 6,40(1)\n\t" \
4959*a9643ea8Slogwang "mvc 160(8,15), 48(1)\n\t" \
4960*a9643ea8Slogwang "mvc 168(8,15), 56(1)\n\t" \
4961*a9643ea8Slogwang "mvc 176(8,15), 64(1)\n\t" \
4962*a9643ea8Slogwang "lg 1, 0(1)\n\t" \
4963*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_R1 \
4964*a9643ea8Slogwang "lgr %0, 2\n\t" \
4965*a9643ea8Slogwang "aghi 15,184\n\t" \
4966*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
4967*a9643ea8Slogwang : /*out*/ "=d" (_res) \
4968*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4969*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4970*a9643ea8Slogwang ); \
4971*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
4972*a9643ea8Slogwang } while (0)
4973*a9643ea8Slogwang
4974*a9643ea8Slogwang #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4975*a9643ea8Slogwang arg6, arg7 ,arg8, arg9) \
4976*a9643ea8Slogwang do { \
4977*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
4978*a9643ea8Slogwang volatile unsigned long _argvec[10]; \
4979*a9643ea8Slogwang volatile unsigned long _res; \
4980*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
4981*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
4982*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
4983*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
4984*a9643ea8Slogwang _argvec[4] = (unsigned long)arg4; \
4985*a9643ea8Slogwang _argvec[5] = (unsigned long)arg5; \
4986*a9643ea8Slogwang _argvec[6] = (unsigned long)arg6; \
4987*a9643ea8Slogwang _argvec[7] = (unsigned long)arg7; \
4988*a9643ea8Slogwang _argvec[8] = (unsigned long)arg8; \
4989*a9643ea8Slogwang _argvec[9] = (unsigned long)arg9; \
4990*a9643ea8Slogwang __asm__ volatile( \
4991*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
4992*a9643ea8Slogwang "aghi 15,-192\n\t" \
4993*a9643ea8Slogwang "lg 2, 8(1)\n\t" \
4994*a9643ea8Slogwang "lg 3,16(1)\n\t" \
4995*a9643ea8Slogwang "lg 4,24(1)\n\t" \
4996*a9643ea8Slogwang "lg 5,32(1)\n\t" \
4997*a9643ea8Slogwang "lg 6,40(1)\n\t" \
4998*a9643ea8Slogwang "mvc 160(8,15), 48(1)\n\t" \
4999*a9643ea8Slogwang "mvc 168(8,15), 56(1)\n\t" \
5000*a9643ea8Slogwang "mvc 176(8,15), 64(1)\n\t" \
5001*a9643ea8Slogwang "mvc 184(8,15), 72(1)\n\t" \
5002*a9643ea8Slogwang "lg 1, 0(1)\n\t" \
5003*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_R1 \
5004*a9643ea8Slogwang "lgr %0, 2\n\t" \
5005*a9643ea8Slogwang "aghi 15,192\n\t" \
5006*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
5007*a9643ea8Slogwang : /*out*/ "=d" (_res) \
5008*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5009*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5010*a9643ea8Slogwang ); \
5011*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5012*a9643ea8Slogwang } while (0)
5013*a9643ea8Slogwang
5014*a9643ea8Slogwang #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5015*a9643ea8Slogwang arg6, arg7 ,arg8, arg9, arg10) \
5016*a9643ea8Slogwang do { \
5017*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5018*a9643ea8Slogwang volatile unsigned long _argvec[11]; \
5019*a9643ea8Slogwang volatile unsigned long _res; \
5020*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5021*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
5022*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
5023*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
5024*a9643ea8Slogwang _argvec[4] = (unsigned long)arg4; \
5025*a9643ea8Slogwang _argvec[5] = (unsigned long)arg5; \
5026*a9643ea8Slogwang _argvec[6] = (unsigned long)arg6; \
5027*a9643ea8Slogwang _argvec[7] = (unsigned long)arg7; \
5028*a9643ea8Slogwang _argvec[8] = (unsigned long)arg8; \
5029*a9643ea8Slogwang _argvec[9] = (unsigned long)arg9; \
5030*a9643ea8Slogwang _argvec[10] = (unsigned long)arg10; \
5031*a9643ea8Slogwang __asm__ volatile( \
5032*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
5033*a9643ea8Slogwang "aghi 15,-200\n\t" \
5034*a9643ea8Slogwang "lg 2, 8(1)\n\t" \
5035*a9643ea8Slogwang "lg 3,16(1)\n\t" \
5036*a9643ea8Slogwang "lg 4,24(1)\n\t" \
5037*a9643ea8Slogwang "lg 5,32(1)\n\t" \
5038*a9643ea8Slogwang "lg 6,40(1)\n\t" \
5039*a9643ea8Slogwang "mvc 160(8,15), 48(1)\n\t" \
5040*a9643ea8Slogwang "mvc 168(8,15), 56(1)\n\t" \
5041*a9643ea8Slogwang "mvc 176(8,15), 64(1)\n\t" \
5042*a9643ea8Slogwang "mvc 184(8,15), 72(1)\n\t" \
5043*a9643ea8Slogwang "mvc 192(8,15), 80(1)\n\t" \
5044*a9643ea8Slogwang "lg 1, 0(1)\n\t" \
5045*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_R1 \
5046*a9643ea8Slogwang "lgr %0, 2\n\t" \
5047*a9643ea8Slogwang "aghi 15,200\n\t" \
5048*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
5049*a9643ea8Slogwang : /*out*/ "=d" (_res) \
5050*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5051*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5052*a9643ea8Slogwang ); \
5053*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5054*a9643ea8Slogwang } while (0)
5055*a9643ea8Slogwang
5056*a9643ea8Slogwang #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5057*a9643ea8Slogwang arg6, arg7 ,arg8, arg9, arg10, arg11) \
5058*a9643ea8Slogwang do { \
5059*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5060*a9643ea8Slogwang volatile unsigned long _argvec[12]; \
5061*a9643ea8Slogwang volatile unsigned long _res; \
5062*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5063*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
5064*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
5065*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
5066*a9643ea8Slogwang _argvec[4] = (unsigned long)arg4; \
5067*a9643ea8Slogwang _argvec[5] = (unsigned long)arg5; \
5068*a9643ea8Slogwang _argvec[6] = (unsigned long)arg6; \
5069*a9643ea8Slogwang _argvec[7] = (unsigned long)arg7; \
5070*a9643ea8Slogwang _argvec[8] = (unsigned long)arg8; \
5071*a9643ea8Slogwang _argvec[9] = (unsigned long)arg9; \
5072*a9643ea8Slogwang _argvec[10] = (unsigned long)arg10; \
5073*a9643ea8Slogwang _argvec[11] = (unsigned long)arg11; \
5074*a9643ea8Slogwang __asm__ volatile( \
5075*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
5076*a9643ea8Slogwang "aghi 15,-208\n\t" \
5077*a9643ea8Slogwang "lg 2, 8(1)\n\t" \
5078*a9643ea8Slogwang "lg 3,16(1)\n\t" \
5079*a9643ea8Slogwang "lg 4,24(1)\n\t" \
5080*a9643ea8Slogwang "lg 5,32(1)\n\t" \
5081*a9643ea8Slogwang "lg 6,40(1)\n\t" \
5082*a9643ea8Slogwang "mvc 160(8,15), 48(1)\n\t" \
5083*a9643ea8Slogwang "mvc 168(8,15), 56(1)\n\t" \
5084*a9643ea8Slogwang "mvc 176(8,15), 64(1)\n\t" \
5085*a9643ea8Slogwang "mvc 184(8,15), 72(1)\n\t" \
5086*a9643ea8Slogwang "mvc 192(8,15), 80(1)\n\t" \
5087*a9643ea8Slogwang "mvc 200(8,15), 88(1)\n\t" \
5088*a9643ea8Slogwang "lg 1, 0(1)\n\t" \
5089*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_R1 \
5090*a9643ea8Slogwang "lgr %0, 2\n\t" \
5091*a9643ea8Slogwang "aghi 15,208\n\t" \
5092*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
5093*a9643ea8Slogwang : /*out*/ "=d" (_res) \
5094*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5095*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5096*a9643ea8Slogwang ); \
5097*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5098*a9643ea8Slogwang } while (0)
5099*a9643ea8Slogwang
5100*a9643ea8Slogwang #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5101*a9643ea8Slogwang arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
5102*a9643ea8Slogwang do { \
5103*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5104*a9643ea8Slogwang volatile unsigned long _argvec[13]; \
5105*a9643ea8Slogwang volatile unsigned long _res; \
5106*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5107*a9643ea8Slogwang _argvec[1] = (unsigned long)arg1; \
5108*a9643ea8Slogwang _argvec[2] = (unsigned long)arg2; \
5109*a9643ea8Slogwang _argvec[3] = (unsigned long)arg3; \
5110*a9643ea8Slogwang _argvec[4] = (unsigned long)arg4; \
5111*a9643ea8Slogwang _argvec[5] = (unsigned long)arg5; \
5112*a9643ea8Slogwang _argvec[6] = (unsigned long)arg6; \
5113*a9643ea8Slogwang _argvec[7] = (unsigned long)arg7; \
5114*a9643ea8Slogwang _argvec[8] = (unsigned long)arg8; \
5115*a9643ea8Slogwang _argvec[9] = (unsigned long)arg9; \
5116*a9643ea8Slogwang _argvec[10] = (unsigned long)arg10; \
5117*a9643ea8Slogwang _argvec[11] = (unsigned long)arg11; \
5118*a9643ea8Slogwang _argvec[12] = (unsigned long)arg12; \
5119*a9643ea8Slogwang __asm__ volatile( \
5120*a9643ea8Slogwang VALGRIND_CFI_PROLOGUE \
5121*a9643ea8Slogwang "aghi 15,-216\n\t" \
5122*a9643ea8Slogwang "lg 2, 8(1)\n\t" \
5123*a9643ea8Slogwang "lg 3,16(1)\n\t" \
5124*a9643ea8Slogwang "lg 4,24(1)\n\t" \
5125*a9643ea8Slogwang "lg 5,32(1)\n\t" \
5126*a9643ea8Slogwang "lg 6,40(1)\n\t" \
5127*a9643ea8Slogwang "mvc 160(8,15), 48(1)\n\t" \
5128*a9643ea8Slogwang "mvc 168(8,15), 56(1)\n\t" \
5129*a9643ea8Slogwang "mvc 176(8,15), 64(1)\n\t" \
5130*a9643ea8Slogwang "mvc 184(8,15), 72(1)\n\t" \
5131*a9643ea8Slogwang "mvc 192(8,15), 80(1)\n\t" \
5132*a9643ea8Slogwang "mvc 200(8,15), 88(1)\n\t" \
5133*a9643ea8Slogwang "mvc 208(8,15), 96(1)\n\t" \
5134*a9643ea8Slogwang "lg 1, 0(1)\n\t" \
5135*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_R1 \
5136*a9643ea8Slogwang "lgr %0, 2\n\t" \
5137*a9643ea8Slogwang "aghi 15,216\n\t" \
5138*a9643ea8Slogwang VALGRIND_CFI_EPILOGUE \
5139*a9643ea8Slogwang : /*out*/ "=d" (_res) \
5140*a9643ea8Slogwang : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5141*a9643ea8Slogwang : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5142*a9643ea8Slogwang ); \
5143*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5144*a9643ea8Slogwang } while (0)
5145*a9643ea8Slogwang
5146*a9643ea8Slogwang
5147*a9643ea8Slogwang #endif /* PLAT_s390x_linux */
5148*a9643ea8Slogwang
5149*a9643ea8Slogwang /* ------------------------- mips32-linux ----------------------- */
5150*a9643ea8Slogwang
5151*a9643ea8Slogwang #if defined(PLAT_mips32_linux)
5152*a9643ea8Slogwang
5153*a9643ea8Slogwang /* These regs are trashed by the hidden call. */
5154*a9643ea8Slogwang #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5155*a9643ea8Slogwang "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
5156*a9643ea8Slogwang "$25", "$31"
5157*a9643ea8Slogwang
5158*a9643ea8Slogwang /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5159*a9643ea8Slogwang long) == 4. */
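/* A compile-time check of that assumption could look like the following
   sketch (not part of this header; the typedef name is arbitrary):

      typedef char vg_assert_mips32_ulong_is_4_bytes
                      [sizeof(unsigned long) == 4 ? 1 : -1];

   If unsigned long were not 4 bytes the array size would be negative and
   compilation would fail. */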
5160*a9643ea8Slogwang
5161*a9643ea8Slogwang #define CALL_FN_W_v(lval, orig) \
5162*a9643ea8Slogwang do { \
5163*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5164*a9643ea8Slogwang volatile unsigned long _argvec[1]; \
5165*a9643ea8Slogwang volatile unsigned long _res; \
5166*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5167*a9643ea8Slogwang __asm__ volatile( \
5168*a9643ea8Slogwang "subu $29, $29, 8 \n\t" \
5169*a9643ea8Slogwang "sw $28, 0($29) \n\t" \
5170*a9643ea8Slogwang "sw $31, 4($29) \n\t" \
5171*a9643ea8Slogwang "subu $29, $29, 16 \n\t" \
5172*a9643ea8Slogwang "lw $25, 0(%1) \n\t" /* target->t9 */ \
5173*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5174*a9643ea8Slogwang "addu $29, $29, 16\n\t" \
5175*a9643ea8Slogwang "lw $28, 0($29) \n\t" \
5176*a9643ea8Slogwang "lw $31, 4($29) \n\t" \
5177*a9643ea8Slogwang "addu $29, $29, 8 \n\t" \
5178*a9643ea8Slogwang "move %0, $2\n" \
5179*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5180*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
5181*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5182*a9643ea8Slogwang ); \
5183*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5184*a9643ea8Slogwang } while (0)
5185*a9643ea8Slogwang
5186*a9643ea8Slogwang #define CALL_FN_W_W(lval, orig, arg1) \
5187*a9643ea8Slogwang do { \
5188*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5189*a9643ea8Slogwang volatile unsigned long _argvec[2]; \
5190*a9643ea8Slogwang volatile unsigned long _res; \
5191*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5192*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5193*a9643ea8Slogwang __asm__ volatile( \
5194*a9643ea8Slogwang "subu $29, $29, 8 \n\t" \
5195*a9643ea8Slogwang "sw $28, 0($29) \n\t" \
5196*a9643ea8Slogwang "sw $31, 4($29) \n\t" \
5197*a9643ea8Slogwang "subu $29, $29, 16 \n\t" \
5198*a9643ea8Slogwang "lw $4, 4(%1) \n\t" /* arg1*/ \
5199*a9643ea8Slogwang "lw $25, 0(%1) \n\t" /* target->t9 */ \
5200*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5201*a9643ea8Slogwang "addu $29, $29, 16 \n\t" \
5202*a9643ea8Slogwang "lw $28, 0($29) \n\t" \
5203*a9643ea8Slogwang "lw $31, 4($29) \n\t" \
5204*a9643ea8Slogwang "addu $29, $29, 8 \n\t" \
5205*a9643ea8Slogwang "move %0, $2\n" \
5206*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5207*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
5208*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5209*a9643ea8Slogwang ); \
5210*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5211*a9643ea8Slogwang } while (0)
5212*a9643ea8Slogwang
5213*a9643ea8Slogwang #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5214*a9643ea8Slogwang do { \
5215*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5216*a9643ea8Slogwang volatile unsigned long _argvec[3]; \
5217*a9643ea8Slogwang volatile unsigned long _res; \
5218*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5219*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5220*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5221*a9643ea8Slogwang __asm__ volatile( \
5222*a9643ea8Slogwang "subu $29, $29, 8 \n\t" \
5223*a9643ea8Slogwang "sw $28, 0($29) \n\t" \
5224*a9643ea8Slogwang "sw $31, 4($29) \n\t" \
5225*a9643ea8Slogwang "subu $29, $29, 16 \n\t" \
5226*a9643ea8Slogwang "lw $4, 4(%1) \n\t" \
5227*a9643ea8Slogwang "lw $5, 8(%1) \n\t" \
5228*a9643ea8Slogwang "lw $25, 0(%1) \n\t" /* target->t9 */ \
5229*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5230*a9643ea8Slogwang "addu $29, $29, 16 \n\t" \
5231*a9643ea8Slogwang "lw $28, 0($29) \n\t" \
5232*a9643ea8Slogwang "lw $31, 4($29) \n\t" \
5233*a9643ea8Slogwang "addu $29, $29, 8 \n\t" \
5234*a9643ea8Slogwang "move %0, $2\n" \
5235*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5236*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
5237*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5238*a9643ea8Slogwang ); \
5239*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5240*a9643ea8Slogwang } while (0)
5241*a9643ea8Slogwang
5242*a9643ea8Slogwang #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5243*a9643ea8Slogwang do { \
5244*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5245*a9643ea8Slogwang volatile unsigned long _argvec[4]; \
5246*a9643ea8Slogwang volatile unsigned long _res; \
5247*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5248*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5249*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5250*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
5251*a9643ea8Slogwang __asm__ volatile( \
5252*a9643ea8Slogwang "subu $29, $29, 8 \n\t" \
5253*a9643ea8Slogwang "sw $28, 0($29) \n\t" \
5254*a9643ea8Slogwang "sw $31, 4($29) \n\t" \
5255*a9643ea8Slogwang "subu $29, $29, 16 \n\t" \
5256*a9643ea8Slogwang "lw $4, 4(%1) \n\t" \
5257*a9643ea8Slogwang "lw $5, 8(%1) \n\t" \
5258*a9643ea8Slogwang "lw $6, 12(%1) \n\t" \
5259*a9643ea8Slogwang "lw $25, 0(%1) \n\t" /* target->t9 */ \
5260*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5261*a9643ea8Slogwang "addu $29, $29, 16 \n\t" \
5262*a9643ea8Slogwang "lw $28, 0($29) \n\t" \
5263*a9643ea8Slogwang "lw $31, 4($29) \n\t" \
5264*a9643ea8Slogwang "addu $29, $29, 8 \n\t" \
5265*a9643ea8Slogwang "move %0, $2\n" \
5266*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5267*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
5268*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5269*a9643ea8Slogwang ); \
5270*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5271*a9643ea8Slogwang } while (0)
5272*a9643ea8Slogwang
5273*a9643ea8Slogwang #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5274*a9643ea8Slogwang do { \
5275*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5276*a9643ea8Slogwang volatile unsigned long _argvec[5]; \
5277*a9643ea8Slogwang volatile unsigned long _res; \
5278*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5279*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5280*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5281*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
5282*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
5283*a9643ea8Slogwang __asm__ volatile( \
5284*a9643ea8Slogwang "subu $29, $29, 8 \n\t" \
5285*a9643ea8Slogwang "sw $28, 0($29) \n\t" \
5286*a9643ea8Slogwang "sw $31, 4($29) \n\t" \
5287*a9643ea8Slogwang "subu $29, $29, 16 \n\t" \
5288*a9643ea8Slogwang "lw $4, 4(%1) \n\t" \
5289*a9643ea8Slogwang "lw $5, 8(%1) \n\t" \
5290*a9643ea8Slogwang "lw $6, 12(%1) \n\t" \
5291*a9643ea8Slogwang "lw $7, 16(%1) \n\t" \
5292*a9643ea8Slogwang "lw $25, 0(%1) \n\t" /* target->t9 */ \
5293*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5294*a9643ea8Slogwang "addu $29, $29, 16 \n\t" \
5295*a9643ea8Slogwang "lw $28, 0($29) \n\t" \
5296*a9643ea8Slogwang "lw $31, 4($29) \n\t" \
5297*a9643ea8Slogwang "addu $29, $29, 8 \n\t" \
5298*a9643ea8Slogwang "move %0, $2\n" \
5299*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5300*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
5301*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5302*a9643ea8Slogwang ); \
5303*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5304*a9643ea8Slogwang } while (0)
5305*a9643ea8Slogwang
5306*a9643ea8Slogwang #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5307*a9643ea8Slogwang do { \
5308*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5309*a9643ea8Slogwang volatile unsigned long _argvec[6]; \
5310*a9643ea8Slogwang volatile unsigned long _res; \
5311*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5312*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5313*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5314*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
5315*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
5316*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
5317*a9643ea8Slogwang __asm__ volatile( \
5318*a9643ea8Slogwang "subu $29, $29, 8 \n\t" \
5319*a9643ea8Slogwang "sw $28, 0($29) \n\t" \
5320*a9643ea8Slogwang "sw $31, 4($29) \n\t" \
5321*a9643ea8Slogwang "lw $4, 20(%1) \n\t" \
5322*a9643ea8Slogwang "subu $29, $29, 24\n\t" \
5323*a9643ea8Slogwang "sw $4, 16($29) \n\t" \
5324*a9643ea8Slogwang "lw $4, 4(%1) \n\t" \
5325*a9643ea8Slogwang "lw $5, 8(%1) \n\t" \
5326*a9643ea8Slogwang "lw $6, 12(%1) \n\t" \
5327*a9643ea8Slogwang "lw $7, 16(%1) \n\t" \
5328*a9643ea8Slogwang "lw $25, 0(%1) \n\t" /* target->t9 */ \
5329*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5330*a9643ea8Slogwang "addu $29, $29, 24 \n\t" \
5331*a9643ea8Slogwang "lw $28, 0($29) \n\t" \
5332*a9643ea8Slogwang "lw $31, 4($29) \n\t" \
5333*a9643ea8Slogwang "addu $29, $29, 8 \n\t" \
5334*a9643ea8Slogwang "move %0, $2\n" \
5335*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5336*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
5337*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5338*a9643ea8Slogwang ); \
5339*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5340*a9643ea8Slogwang } while (0)
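/* Note on the 5-argument and larger variants (above and below): the o32
   ABI passes only the first four arguments in $4-$7; arg5 and beyond go on
   the stack, and the caller must still reserve 16 bytes at the bottom of
   the outgoing area as home space for $4-$7.  Hence the macros drop $29 by
   a multiple of 8 and store arg5 at 16($29), arg6 at 20($29), and so on. */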

5341*a9643ea8Slogwang #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5342*a9643ea8Slogwang do { \
5343*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5344*a9643ea8Slogwang volatile unsigned long _argvec[7]; \
5345*a9643ea8Slogwang volatile unsigned long _res; \
5346*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5347*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5348*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5349*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
5350*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
5351*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
5352*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
5353*a9643ea8Slogwang __asm__ volatile( \
5354*a9643ea8Slogwang "subu $29, $29, 8 \n\t" \
5355*a9643ea8Slogwang "sw $28, 0($29) \n\t" \
5356*a9643ea8Slogwang "sw $31, 4($29) \n\t" \
5357*a9643ea8Slogwang "lw $4, 20(%1) \n\t" \
5358*a9643ea8Slogwang "subu $29, $29, 32\n\t" \
5359*a9643ea8Slogwang "sw $4, 16($29) \n\t" \
5360*a9643ea8Slogwang "lw $4, 24(%1) \n\t" \
5361*a9643ea8Slogwang "nop\n\t" \
5362*a9643ea8Slogwang "sw $4, 20($29) \n\t" \
5363*a9643ea8Slogwang "lw $4, 4(%1) \n\t" \
5364*a9643ea8Slogwang "lw $5, 8(%1) \n\t" \
5365*a9643ea8Slogwang "lw $6, 12(%1) \n\t" \
5366*a9643ea8Slogwang "lw $7, 16(%1) \n\t" \
5367*a9643ea8Slogwang "lw $25, 0(%1) \n\t" /* target->t9 */ \
5368*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5369*a9643ea8Slogwang "addu $29, $29, 32 \n\t" \
5370*a9643ea8Slogwang "lw $28, 0($29) \n\t" \
5371*a9643ea8Slogwang "lw $31, 4($29) \n\t" \
5372*a9643ea8Slogwang "addu $29, $29, 8 \n\t" \
5373*a9643ea8Slogwang "move %0, $2\n" \
5374*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5375*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
5376*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5377*a9643ea8Slogwang ); \
5378*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5379*a9643ea8Slogwang } while (0)
5380*a9643ea8Slogwang
5381*a9643ea8Slogwang #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5382*a9643ea8Slogwang arg7) \
5383*a9643ea8Slogwang do { \
5384*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5385*a9643ea8Slogwang volatile unsigned long _argvec[8]; \
5386*a9643ea8Slogwang volatile unsigned long _res; \
5387*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5388*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5389*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5390*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
5391*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
5392*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
5393*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
5394*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
5395*a9643ea8Slogwang __asm__ volatile( \
5396*a9643ea8Slogwang "subu $29, $29, 8 \n\t" \
5397*a9643ea8Slogwang "sw $28, 0($29) \n\t" \
5398*a9643ea8Slogwang "sw $31, 4($29) \n\t" \
5399*a9643ea8Slogwang "lw $4, 20(%1) \n\t" \
5400*a9643ea8Slogwang "subu $29, $29, 32\n\t" \
5401*a9643ea8Slogwang "sw $4, 16($29) \n\t" \
5402*a9643ea8Slogwang "lw $4, 24(%1) \n\t" \
5403*a9643ea8Slogwang "sw $4, 20($29) \n\t" \
5404*a9643ea8Slogwang "lw $4, 28(%1) \n\t" \
5405*a9643ea8Slogwang "sw $4, 24($29) \n\t" \
5406*a9643ea8Slogwang "lw $4, 4(%1) \n\t" \
5407*a9643ea8Slogwang "lw $5, 8(%1) \n\t" \
5408*a9643ea8Slogwang "lw $6, 12(%1) \n\t" \
5409*a9643ea8Slogwang "lw $7, 16(%1) \n\t" \
5410*a9643ea8Slogwang "lw $25, 0(%1) \n\t" /* target->t9 */ \
5411*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5412*a9643ea8Slogwang "addu $29, $29, 32 \n\t" \
5413*a9643ea8Slogwang "lw $28, 0($29) \n\t" \
5414*a9643ea8Slogwang "lw $31, 4($29) \n\t" \
5415*a9643ea8Slogwang "addu $29, $29, 8 \n\t" \
5416*a9643ea8Slogwang "move %0, $2\n" \
5417*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5418*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
5419*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5420*a9643ea8Slogwang ); \
5421*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5422*a9643ea8Slogwang } while (0)
5423*a9643ea8Slogwang
5424*a9643ea8Slogwang #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5425*a9643ea8Slogwang arg7,arg8) \
5426*a9643ea8Slogwang do { \
5427*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5428*a9643ea8Slogwang volatile unsigned long _argvec[9]; \
5429*a9643ea8Slogwang volatile unsigned long _res; \
5430*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5431*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5432*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5433*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
5434*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
5435*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
5436*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
5437*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
5438*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
5439*a9643ea8Slogwang __asm__ volatile( \
5440*a9643ea8Slogwang "subu $29, $29, 8 \n\t" \
5441*a9643ea8Slogwang "sw $28, 0($29) \n\t" \
5442*a9643ea8Slogwang "sw $31, 4($29) \n\t" \
5443*a9643ea8Slogwang "lw $4, 20(%1) \n\t" \
5444*a9643ea8Slogwang "subu $29, $29, 40\n\t" \
5445*a9643ea8Slogwang "sw $4, 16($29) \n\t" \
5446*a9643ea8Slogwang "lw $4, 24(%1) \n\t" \
5447*a9643ea8Slogwang "sw $4, 20($29) \n\t" \
5448*a9643ea8Slogwang "lw $4, 28(%1) \n\t" \
5449*a9643ea8Slogwang "sw $4, 24($29) \n\t" \
5450*a9643ea8Slogwang "lw $4, 32(%1) \n\t" \
5451*a9643ea8Slogwang "sw $4, 28($29) \n\t" \
5452*a9643ea8Slogwang "lw $4, 4(%1) \n\t" \
5453*a9643ea8Slogwang "lw $5, 8(%1) \n\t" \
5454*a9643ea8Slogwang "lw $6, 12(%1) \n\t" \
5455*a9643ea8Slogwang "lw $7, 16(%1) \n\t" \
5456*a9643ea8Slogwang "lw $25, 0(%1) \n\t" /* target->t9 */ \
5457*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5458*a9643ea8Slogwang "addu $29, $29, 40 \n\t" \
5459*a9643ea8Slogwang "lw $28, 0($29) \n\t" \
5460*a9643ea8Slogwang "lw $31, 4($29) \n\t" \
5461*a9643ea8Slogwang "addu $29, $29, 8 \n\t" \
5462*a9643ea8Slogwang "move %0, $2\n" \
5463*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5464*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
5465*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5466*a9643ea8Slogwang ); \
5467*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5468*a9643ea8Slogwang } while (0)
5469*a9643ea8Slogwang
5470*a9643ea8Slogwang #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5471*a9643ea8Slogwang arg7,arg8,arg9) \
5472*a9643ea8Slogwang do { \
5473*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5474*a9643ea8Slogwang volatile unsigned long _argvec[10]; \
5475*a9643ea8Slogwang volatile unsigned long _res; \
5476*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5477*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5478*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5479*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
5480*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
5481*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
5482*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
5483*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
5484*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
5485*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
5486*a9643ea8Slogwang __asm__ volatile( \
5487*a9643ea8Slogwang "subu $29, $29, 8 \n\t" \
5488*a9643ea8Slogwang "sw $28, 0($29) \n\t" \
5489*a9643ea8Slogwang "sw $31, 4($29) \n\t" \
5490*a9643ea8Slogwang "lw $4, 20(%1) \n\t" \
5491*a9643ea8Slogwang "subu $29, $29, 40\n\t" \
5492*a9643ea8Slogwang "sw $4, 16($29) \n\t" \
5493*a9643ea8Slogwang "lw $4, 24(%1) \n\t" \
5494*a9643ea8Slogwang "sw $4, 20($29) \n\t" \
5495*a9643ea8Slogwang "lw $4, 28(%1) \n\t" \
5496*a9643ea8Slogwang "sw $4, 24($29) \n\t" \
5497*a9643ea8Slogwang "lw $4, 32(%1) \n\t" \
5498*a9643ea8Slogwang "sw $4, 28($29) \n\t" \
5499*a9643ea8Slogwang "lw $4, 36(%1) \n\t" \
5500*a9643ea8Slogwang "sw $4, 32($29) \n\t" \
5501*a9643ea8Slogwang "lw $4, 4(%1) \n\t" \
5502*a9643ea8Slogwang "lw $5, 8(%1) \n\t" \
5503*a9643ea8Slogwang "lw $6, 12(%1) \n\t" \
5504*a9643ea8Slogwang "lw $7, 16(%1) \n\t" \
5505*a9643ea8Slogwang "lw $25, 0(%1) \n\t" /* target->t9 */ \
5506*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5507*a9643ea8Slogwang "addu $29, $29, 40 \n\t" \
5508*a9643ea8Slogwang "lw $28, 0($29) \n\t" \
5509*a9643ea8Slogwang "lw $31, 4($29) \n\t" \
5510*a9643ea8Slogwang "addu $29, $29, 8 \n\t" \
5511*a9643ea8Slogwang "move %0, $2\n" \
5512*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5513*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
5514*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5515*a9643ea8Slogwang ); \
5516*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5517*a9643ea8Slogwang } while (0)
5518*a9643ea8Slogwang
5519*a9643ea8Slogwang #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5520*a9643ea8Slogwang arg7,arg8,arg9,arg10) \
5521*a9643ea8Slogwang do { \
5522*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5523*a9643ea8Slogwang volatile unsigned long _argvec[11]; \
5524*a9643ea8Slogwang volatile unsigned long _res; \
5525*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5526*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5527*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5528*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
5529*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
5530*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
5531*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
5532*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
5533*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
5534*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
5535*a9643ea8Slogwang _argvec[10] = (unsigned long)(arg10); \
5536*a9643ea8Slogwang __asm__ volatile( \
5537*a9643ea8Slogwang "subu $29, $29, 8 \n\t" \
5538*a9643ea8Slogwang "sw $28, 0($29) \n\t" \
5539*a9643ea8Slogwang "sw $31, 4($29) \n\t" \
5540*a9643ea8Slogwang "lw $4, 20(%1) \n\t" \
5541*a9643ea8Slogwang "subu $29, $29, 48\n\t" \
5542*a9643ea8Slogwang "sw $4, 16($29) \n\t" \
5543*a9643ea8Slogwang "lw $4, 24(%1) \n\t" \
5544*a9643ea8Slogwang "sw $4, 20($29) \n\t" \
5545*a9643ea8Slogwang "lw $4, 28(%1) \n\t" \
5546*a9643ea8Slogwang "sw $4, 24($29) \n\t" \
5547*a9643ea8Slogwang "lw $4, 32(%1) \n\t" \
5548*a9643ea8Slogwang "sw $4, 28($29) \n\t" \
5549*a9643ea8Slogwang "lw $4, 36(%1) \n\t" \
5550*a9643ea8Slogwang "sw $4, 32($29) \n\t" \
5551*a9643ea8Slogwang "lw $4, 40(%1) \n\t" \
5552*a9643ea8Slogwang "sw $4, 36($29) \n\t" \
5553*a9643ea8Slogwang "lw $4, 4(%1) \n\t" \
5554*a9643ea8Slogwang "lw $5, 8(%1) \n\t" \
5555*a9643ea8Slogwang "lw $6, 12(%1) \n\t" \
5556*a9643ea8Slogwang "lw $7, 16(%1) \n\t" \
5557*a9643ea8Slogwang "lw $25, 0(%1) \n\t" /* target->t9 */ \
5558*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5559*a9643ea8Slogwang "addu $29, $29, 48 \n\t" \
5560*a9643ea8Slogwang "lw $28, 0($29) \n\t" \
5561*a9643ea8Slogwang "lw $31, 4($29) \n\t" \
5562*a9643ea8Slogwang "addu $29, $29, 8 \n\t" \
5563*a9643ea8Slogwang "move %0, $2\n" \
5564*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5565*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
5566*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5567*a9643ea8Slogwang ); \
5568*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5569*a9643ea8Slogwang } while (0)
5570*a9643ea8Slogwang
5571*a9643ea8Slogwang #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5572*a9643ea8Slogwang arg6,arg7,arg8,arg9,arg10, \
5573*a9643ea8Slogwang arg11) \
5574*a9643ea8Slogwang do { \
5575*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5576*a9643ea8Slogwang volatile unsigned long _argvec[12]; \
5577*a9643ea8Slogwang volatile unsigned long _res; \
5578*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5579*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5580*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5581*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
5582*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
5583*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
5584*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
5585*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
5586*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
5587*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
5588*a9643ea8Slogwang _argvec[10] = (unsigned long)(arg10); \
5589*a9643ea8Slogwang _argvec[11] = (unsigned long)(arg11); \
5590*a9643ea8Slogwang __asm__ volatile( \
5591*a9643ea8Slogwang "subu $29, $29, 8 \n\t" \
5592*a9643ea8Slogwang "sw $28, 0($29) \n\t" \
5593*a9643ea8Slogwang "sw $31, 4($29) \n\t" \
5594*a9643ea8Slogwang "lw $4, 20(%1) \n\t" \
5595*a9643ea8Slogwang "subu $29, $29, 48\n\t" \
5596*a9643ea8Slogwang "sw $4, 16($29) \n\t" \
5597*a9643ea8Slogwang "lw $4, 24(%1) \n\t" \
5598*a9643ea8Slogwang "sw $4, 20($29) \n\t" \
5599*a9643ea8Slogwang "lw $4, 28(%1) \n\t" \
5600*a9643ea8Slogwang "sw $4, 24($29) \n\t" \
5601*a9643ea8Slogwang "lw $4, 32(%1) \n\t" \
5602*a9643ea8Slogwang "sw $4, 28($29) \n\t" \
5603*a9643ea8Slogwang "lw $4, 36(%1) \n\t" \
5604*a9643ea8Slogwang "sw $4, 32($29) \n\t" \
5605*a9643ea8Slogwang "lw $4, 40(%1) \n\t" \
5606*a9643ea8Slogwang "sw $4, 36($29) \n\t" \
5607*a9643ea8Slogwang "lw $4, 44(%1) \n\t" \
5608*a9643ea8Slogwang "sw $4, 40($29) \n\t" \
5609*a9643ea8Slogwang "lw $4, 4(%1) \n\t" \
5610*a9643ea8Slogwang "lw $5, 8(%1) \n\t" \
5611*a9643ea8Slogwang "lw $6, 12(%1) \n\t" \
5612*a9643ea8Slogwang "lw $7, 16(%1) \n\t" \
5613*a9643ea8Slogwang "lw $25, 0(%1) \n\t" /* target->t9 */ \
5614*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5615*a9643ea8Slogwang "addu $29, $29, 48 \n\t" \
5616*a9643ea8Slogwang "lw $28, 0($29) \n\t" \
5617*a9643ea8Slogwang "lw $31, 4($29) \n\t" \
5618*a9643ea8Slogwang "addu $29, $29, 8 \n\t" \
5619*a9643ea8Slogwang "move %0, $2\n" \
5620*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5621*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
5622*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5623*a9643ea8Slogwang ); \
5624*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5625*a9643ea8Slogwang } while (0)
5626*a9643ea8Slogwang
5627*a9643ea8Slogwang #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5628*a9643ea8Slogwang arg6,arg7,arg8,arg9,arg10, \
5629*a9643ea8Slogwang arg11,arg12) \
5630*a9643ea8Slogwang do { \
5631*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5632*a9643ea8Slogwang volatile unsigned long _argvec[13]; \
5633*a9643ea8Slogwang volatile unsigned long _res; \
5634*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5635*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5636*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5637*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
5638*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
5639*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
5640*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
5641*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
5642*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
5643*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
5644*a9643ea8Slogwang _argvec[10] = (unsigned long)(arg10); \
5645*a9643ea8Slogwang _argvec[11] = (unsigned long)(arg11); \
5646*a9643ea8Slogwang _argvec[12] = (unsigned long)(arg12); \
5647*a9643ea8Slogwang __asm__ volatile( \
5648*a9643ea8Slogwang "subu $29, $29, 8 \n\t" \
5649*a9643ea8Slogwang "sw $28, 0($29) \n\t" \
5650*a9643ea8Slogwang "sw $31, 4($29) \n\t" \
5651*a9643ea8Slogwang "lw $4, 20(%1) \n\t" \
5652*a9643ea8Slogwang "subu $29, $29, 56\n\t" \
5653*a9643ea8Slogwang "sw $4, 16($29) \n\t" \
5654*a9643ea8Slogwang "lw $4, 24(%1) \n\t" \
5655*a9643ea8Slogwang "sw $4, 20($29) \n\t" \
5656*a9643ea8Slogwang "lw $4, 28(%1) \n\t" \
5657*a9643ea8Slogwang "sw $4, 24($29) \n\t" \
5658*a9643ea8Slogwang "lw $4, 32(%1) \n\t" \
5659*a9643ea8Slogwang "sw $4, 28($29) \n\t" \
5660*a9643ea8Slogwang "lw $4, 36(%1) \n\t" \
5661*a9643ea8Slogwang "sw $4, 32($29) \n\t" \
5662*a9643ea8Slogwang "lw $4, 40(%1) \n\t" \
5663*a9643ea8Slogwang "sw $4, 36($29) \n\t" \
5664*a9643ea8Slogwang "lw $4, 44(%1) \n\t" \
5665*a9643ea8Slogwang "sw $4, 40($29) \n\t" \
5666*a9643ea8Slogwang "lw $4, 48(%1) \n\t" \
5667*a9643ea8Slogwang "sw $4, 44($29) \n\t" \
5668*a9643ea8Slogwang "lw $4, 4(%1) \n\t" \
5669*a9643ea8Slogwang "lw $5, 8(%1) \n\t" \
5670*a9643ea8Slogwang "lw $6, 12(%1) \n\t" \
5671*a9643ea8Slogwang "lw $7, 16(%1) \n\t" \
5672*a9643ea8Slogwang "lw $25, 0(%1) \n\t" /* target->t9 */ \
5673*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5674*a9643ea8Slogwang "addu $29, $29, 56 \n\t" \
5675*a9643ea8Slogwang "lw $28, 0($29) \n\t" \
5676*a9643ea8Slogwang "lw $31, 4($29) \n\t" \
5677*a9643ea8Slogwang "addu $29, $29, 8 \n\t" \
5678*a9643ea8Slogwang "move %0, $2\n" \
5679*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5680*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
5681*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5682*a9643ea8Slogwang ); \
5683*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5684*a9643ea8Slogwang } while (0)
5685*a9643ea8Slogwang
5686*a9643ea8Slogwang #endif /* PLAT_mips32_linux */
5687*a9643ea8Slogwang
5688*a9643ea8Slogwang /* ------------------------- mips64-linux ------------------------- */
5689*a9643ea8Slogwang
5690*a9643ea8Slogwang #if defined(PLAT_mips64_linux)
5691*a9643ea8Slogwang
5692*a9643ea8Slogwang /* These regs are trashed by the hidden call. */
5693*a9643ea8Slogwang #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5694*a9643ea8Slogwang "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
5695*a9643ea8Slogwang "$25", "$31"
5696*a9643ea8Slogwang
5697*a9643ea8Slogwang /* These CALL_FN_ macros assume that on mips64-linux, sizeof(unsigned
5698*a9643ea8Slogwang    long) == 8. */
5699*a9643ea8Slogwang
5700*a9643ea8Slogwang #define CALL_FN_W_v(lval, orig) \
5701*a9643ea8Slogwang do { \
5702*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5703*a9643ea8Slogwang volatile unsigned long _argvec[1]; \
5704*a9643ea8Slogwang volatile unsigned long _res; \
5705*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5706*a9643ea8Slogwang __asm__ volatile( \
5707*a9643ea8Slogwang "ld $25, 0(%1)\n\t" /* target->t9 */ \
5708*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5709*a9643ea8Slogwang "move %0, $2\n" \
5710*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5711*a9643ea8Slogwang : /*in*/ "0" (&_argvec[0]) \
5712*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5713*a9643ea8Slogwang ); \
5714*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5715*a9643ea8Slogwang } while (0)
5716*a9643ea8Slogwang
5717*a9643ea8Slogwang #define CALL_FN_W_W(lval, orig, arg1) \
5718*a9643ea8Slogwang do { \
5719*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5720*a9643ea8Slogwang volatile unsigned long _argvec[2]; \
5721*a9643ea8Slogwang volatile unsigned long _res; \
5722*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5723*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5724*a9643ea8Slogwang __asm__ volatile( \
5725*a9643ea8Slogwang "ld $4, 8(%1)\n\t" /* arg1*/ \
5726*a9643ea8Slogwang "ld $25, 0(%1)\n\t" /* target->t9 */ \
5727*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5728*a9643ea8Slogwang "move %0, $2\n" \
5729*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5730*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
5731*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5732*a9643ea8Slogwang ); \
5733*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5734*a9643ea8Slogwang } while (0)
5735*a9643ea8Slogwang
5736*a9643ea8Slogwang #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5737*a9643ea8Slogwang do { \
5738*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5739*a9643ea8Slogwang volatile unsigned long _argvec[3]; \
5740*a9643ea8Slogwang volatile unsigned long _res; \
5741*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5742*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5743*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5744*a9643ea8Slogwang __asm__ volatile( \
5745*a9643ea8Slogwang "ld $4, 8(%1)\n\t" \
5746*a9643ea8Slogwang "ld $5, 16(%1)\n\t" \
5747*a9643ea8Slogwang "ld $25, 0(%1)\n\t" /* target->t9 */ \
5748*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5749*a9643ea8Slogwang "move %0, $2\n" \
5750*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5751*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
5752*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5753*a9643ea8Slogwang ); \
5754*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5755*a9643ea8Slogwang } while (0)
5756*a9643ea8Slogwang
5757*a9643ea8Slogwang #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5758*a9643ea8Slogwang do { \
5759*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5760*a9643ea8Slogwang volatile unsigned long _argvec[4]; \
5761*a9643ea8Slogwang volatile unsigned long _res; \
5762*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5763*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5764*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5765*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
5766*a9643ea8Slogwang __asm__ volatile( \
5767*a9643ea8Slogwang "ld $4, 8(%1)\n\t" \
5768*a9643ea8Slogwang "ld $5, 16(%1)\n\t" \
5769*a9643ea8Slogwang "ld $6, 24(%1)\n\t" \
5770*a9643ea8Slogwang "ld $25, 0(%1)\n\t" /* target->t9 */ \
5771*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5772*a9643ea8Slogwang "move %0, $2\n" \
5773*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5774*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
5775*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5776*a9643ea8Slogwang ); \
5777*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5778*a9643ea8Slogwang } while (0)
5779*a9643ea8Slogwang
5780*a9643ea8Slogwang #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5781*a9643ea8Slogwang do { \
5782*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5783*a9643ea8Slogwang volatile unsigned long _argvec[5]; \
5784*a9643ea8Slogwang volatile unsigned long _res; \
5785*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5786*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5787*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5788*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
5789*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
5790*a9643ea8Slogwang __asm__ volatile( \
5791*a9643ea8Slogwang "ld $4, 8(%1)\n\t" \
5792*a9643ea8Slogwang "ld $5, 16(%1)\n\t" \
5793*a9643ea8Slogwang "ld $6, 24(%1)\n\t" \
5794*a9643ea8Slogwang "ld $7, 32(%1)\n\t" \
5795*a9643ea8Slogwang "ld $25, 0(%1)\n\t" /* target->t9 */ \
5796*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5797*a9643ea8Slogwang "move %0, $2\n" \
5798*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5799*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
5800*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5801*a9643ea8Slogwang ); \
5802*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5803*a9643ea8Slogwang } while (0)
5804*a9643ea8Slogwang
5805*a9643ea8Slogwang #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5806*a9643ea8Slogwang do { \
5807*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5808*a9643ea8Slogwang volatile unsigned long _argvec[6]; \
5809*a9643ea8Slogwang volatile unsigned long _res; \
5810*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5811*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5812*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5813*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
5814*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
5815*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
5816*a9643ea8Slogwang __asm__ volatile( \
5817*a9643ea8Slogwang "ld $4, 8(%1)\n\t" \
5818*a9643ea8Slogwang "ld $5, 16(%1)\n\t" \
5819*a9643ea8Slogwang "ld $6, 24(%1)\n\t" \
5820*a9643ea8Slogwang "ld $7, 32(%1)\n\t" \
5821*a9643ea8Slogwang "ld $8, 40(%1)\n\t" \
5822*a9643ea8Slogwang "ld $25, 0(%1)\n\t" /* target->t9 */ \
5823*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5824*a9643ea8Slogwang "move %0, $2\n" \
5825*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5826*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
5827*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5828*a9643ea8Slogwang ); \
5829*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5830*a9643ea8Slogwang } while (0)
5831*a9643ea8Slogwang
5832*a9643ea8Slogwang #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5833*a9643ea8Slogwang do { \
5834*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5835*a9643ea8Slogwang volatile unsigned long _argvec[7]; \
5836*a9643ea8Slogwang volatile unsigned long _res; \
5837*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5838*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5839*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5840*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
5841*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
5842*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
5843*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
5844*a9643ea8Slogwang __asm__ volatile( \
5845*a9643ea8Slogwang "ld $4, 8(%1)\n\t" \
5846*a9643ea8Slogwang "ld $5, 16(%1)\n\t" \
5847*a9643ea8Slogwang "ld $6, 24(%1)\n\t" \
5848*a9643ea8Slogwang "ld $7, 32(%1)\n\t" \
5849*a9643ea8Slogwang "ld $8, 40(%1)\n\t" \
5850*a9643ea8Slogwang "ld $9, 48(%1)\n\t" \
5851*a9643ea8Slogwang "ld $25, 0(%1)\n\t" /* target->t9 */ \
5852*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5853*a9643ea8Slogwang "move %0, $2\n" \
5854*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5855*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
5856*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5857*a9643ea8Slogwang ); \
5858*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5859*a9643ea8Slogwang } while (0)
5860*a9643ea8Slogwang
5861*a9643ea8Slogwang #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5862*a9643ea8Slogwang arg7) \
5863*a9643ea8Slogwang do { \
5864*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5865*a9643ea8Slogwang volatile unsigned long _argvec[8]; \
5866*a9643ea8Slogwang volatile unsigned long _res; \
5867*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5868*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5869*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5870*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
5871*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
5872*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
5873*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
5874*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
5875*a9643ea8Slogwang __asm__ volatile( \
5876*a9643ea8Slogwang "ld $4, 8(%1)\n\t" \
5877*a9643ea8Slogwang "ld $5, 16(%1)\n\t" \
5878*a9643ea8Slogwang "ld $6, 24(%1)\n\t" \
5879*a9643ea8Slogwang "ld $7, 32(%1)\n\t" \
5880*a9643ea8Slogwang "ld $8, 40(%1)\n\t" \
5881*a9643ea8Slogwang "ld $9, 48(%1)\n\t" \
5882*a9643ea8Slogwang "ld $10, 56(%1)\n\t" \
5883*a9643ea8Slogwang "ld $25, 0(%1) \n\t" /* target->t9 */ \
5884*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5885*a9643ea8Slogwang "move %0, $2\n" \
5886*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5887*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
5888*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5889*a9643ea8Slogwang ); \
5890*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5891*a9643ea8Slogwang } while (0)
5892*a9643ea8Slogwang
5893*a9643ea8Slogwang #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5894*a9643ea8Slogwang arg7,arg8) \
5895*a9643ea8Slogwang do { \
5896*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5897*a9643ea8Slogwang volatile unsigned long _argvec[9]; \
5898*a9643ea8Slogwang volatile unsigned long _res; \
5899*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5900*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5901*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5902*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
5903*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
5904*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
5905*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
5906*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
5907*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
5908*a9643ea8Slogwang __asm__ volatile( \
5909*a9643ea8Slogwang "ld $4, 8(%1)\n\t" \
5910*a9643ea8Slogwang "ld $5, 16(%1)\n\t" \
5911*a9643ea8Slogwang "ld $6, 24(%1)\n\t" \
5912*a9643ea8Slogwang "ld $7, 32(%1)\n\t" \
5913*a9643ea8Slogwang "ld $8, 40(%1)\n\t" \
5914*a9643ea8Slogwang "ld $9, 48(%1)\n\t" \
5915*a9643ea8Slogwang "ld $10, 56(%1)\n\t" \
5916*a9643ea8Slogwang "ld $11, 64(%1)\n\t" \
5917*a9643ea8Slogwang "ld $25, 0(%1) \n\t" /* target->t9 */ \
5918*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5919*a9643ea8Slogwang "move %0, $2\n" \
5920*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5921*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
5922*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5923*a9643ea8Slogwang ); \
5924*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5925*a9643ea8Slogwang } while (0)
5926*a9643ea8Slogwang
5927*a9643ea8Slogwang #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5928*a9643ea8Slogwang arg7,arg8,arg9) \
5929*a9643ea8Slogwang do { \
5930*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5931*a9643ea8Slogwang volatile unsigned long _argvec[10]; \
5932*a9643ea8Slogwang volatile unsigned long _res; \
5933*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5934*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5935*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5936*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
5937*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
5938*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
5939*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
5940*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
5941*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
5942*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
5943*a9643ea8Slogwang __asm__ volatile( \
5944*a9643ea8Slogwang "dsubu $29, $29, 8\n\t" \
5945*a9643ea8Slogwang "ld $4, 72(%1)\n\t" \
5946*a9643ea8Slogwang "sd $4, 0($29)\n\t" \
5947*a9643ea8Slogwang "ld $4, 8(%1)\n\t" \
5948*a9643ea8Slogwang "ld $5, 16(%1)\n\t" \
5949*a9643ea8Slogwang "ld $6, 24(%1)\n\t" \
5950*a9643ea8Slogwang "ld $7, 32(%1)\n\t" \
5951*a9643ea8Slogwang "ld $8, 40(%1)\n\t" \
5952*a9643ea8Slogwang "ld $9, 48(%1)\n\t" \
5953*a9643ea8Slogwang "ld $10, 56(%1)\n\t" \
5954*a9643ea8Slogwang "ld $11, 64(%1)\n\t" \
5955*a9643ea8Slogwang "ld $25, 0(%1)\n\t" /* target->t9 */ \
5956*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5957*a9643ea8Slogwang "daddu $29, $29, 8\n\t" \
5958*a9643ea8Slogwang "move %0, $2\n" \
5959*a9643ea8Slogwang : /*out*/ "=r" (_res) \
5960*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
5961*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
5962*a9643ea8Slogwang ); \
5963*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
5964*a9643ea8Slogwang } while (0)
5965*a9643ea8Slogwang
5966*a9643ea8Slogwang #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5967*a9643ea8Slogwang arg7,arg8,arg9,arg10) \
5968*a9643ea8Slogwang do { \
5969*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
5970*a9643ea8Slogwang volatile unsigned long _argvec[11]; \
5971*a9643ea8Slogwang volatile unsigned long _res; \
5972*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
5973*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
5974*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
5975*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
5976*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
5977*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
5978*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
5979*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
5980*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
5981*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
5982*a9643ea8Slogwang _argvec[10] = (unsigned long)(arg10); \
5983*a9643ea8Slogwang __asm__ volatile( \
5984*a9643ea8Slogwang "dsubu $29, $29, 16\n\t" \
5985*a9643ea8Slogwang "ld $4, 72(%1)\n\t" \
5986*a9643ea8Slogwang "sd $4, 0($29)\n\t" \
5987*a9643ea8Slogwang "ld $4, 80(%1)\n\t" \
5988*a9643ea8Slogwang "sd $4, 8($29)\n\t" \
5989*a9643ea8Slogwang "ld $4, 8(%1)\n\t" \
5990*a9643ea8Slogwang "ld $5, 16(%1)\n\t" \
5991*a9643ea8Slogwang "ld $6, 24(%1)\n\t" \
5992*a9643ea8Slogwang "ld $7, 32(%1)\n\t" \
5993*a9643ea8Slogwang "ld $8, 40(%1)\n\t" \
5994*a9643ea8Slogwang "ld $9, 48(%1)\n\t" \
5995*a9643ea8Slogwang "ld $10, 56(%1)\n\t" \
5996*a9643ea8Slogwang "ld $11, 64(%1)\n\t" \
5997*a9643ea8Slogwang "ld $25, 0(%1)\n\t" /* target->t9 */ \
5998*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
5999*a9643ea8Slogwang "daddu $29, $29, 16\n\t" \
6000*a9643ea8Slogwang "move %0, $2\n" \
6001*a9643ea8Slogwang : /*out*/ "=r" (_res) \
6002*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
6003*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
6004*a9643ea8Slogwang ); \
6005*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
6006*a9643ea8Slogwang } while (0)
6007*a9643ea8Slogwang
6008*a9643ea8Slogwang #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6009*a9643ea8Slogwang arg6,arg7,arg8,arg9,arg10, \
6010*a9643ea8Slogwang arg11) \
6011*a9643ea8Slogwang do { \
6012*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
6013*a9643ea8Slogwang volatile unsigned long _argvec[12]; \
6014*a9643ea8Slogwang volatile unsigned long _res; \
6015*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
6016*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
6017*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
6018*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
6019*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
6020*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
6021*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
6022*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
6023*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
6024*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
6025*a9643ea8Slogwang _argvec[10] = (unsigned long)(arg10); \
6026*a9643ea8Slogwang _argvec[11] = (unsigned long)(arg11); \
6027*a9643ea8Slogwang __asm__ volatile( \
6028*a9643ea8Slogwang "dsubu $29, $29, 24\n\t" \
6029*a9643ea8Slogwang "ld $4, 72(%1)\n\t" \
6030*a9643ea8Slogwang "sd $4, 0($29)\n\t" \
6031*a9643ea8Slogwang "ld $4, 80(%1)\n\t" \
6032*a9643ea8Slogwang "sd $4, 8($29)\n\t" \
6033*a9643ea8Slogwang "ld $4, 88(%1)\n\t" \
6034*a9643ea8Slogwang "sd $4, 16($29)\n\t" \
6035*a9643ea8Slogwang "ld $4, 8(%1)\n\t" \
6036*a9643ea8Slogwang "ld $5, 16(%1)\n\t" \
6037*a9643ea8Slogwang "ld $6, 24(%1)\n\t" \
6038*a9643ea8Slogwang "ld $7, 32(%1)\n\t" \
6039*a9643ea8Slogwang "ld $8, 40(%1)\n\t" \
6040*a9643ea8Slogwang "ld $9, 48(%1)\n\t" \
6041*a9643ea8Slogwang "ld $10, 56(%1)\n\t" \
6042*a9643ea8Slogwang "ld $11, 64(%1)\n\t" \
6043*a9643ea8Slogwang "ld $25, 0(%1)\n\t" /* target->t9 */ \
6044*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
6045*a9643ea8Slogwang "daddu $29, $29, 24\n\t" \
6046*a9643ea8Slogwang "move %0, $2\n" \
6047*a9643ea8Slogwang : /*out*/ "=r" (_res) \
6048*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
6049*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
6050*a9643ea8Slogwang ); \
6051*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
6052*a9643ea8Slogwang } while (0)
6053*a9643ea8Slogwang
6054*a9643ea8Slogwang #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6055*a9643ea8Slogwang arg6,arg7,arg8,arg9,arg10, \
6056*a9643ea8Slogwang arg11,arg12) \
6057*a9643ea8Slogwang do { \
6058*a9643ea8Slogwang volatile OrigFn _orig = (orig); \
6059*a9643ea8Slogwang volatile unsigned long _argvec[13]; \
6060*a9643ea8Slogwang volatile unsigned long _res; \
6061*a9643ea8Slogwang _argvec[0] = (unsigned long)_orig.nraddr; \
6062*a9643ea8Slogwang _argvec[1] = (unsigned long)(arg1); \
6063*a9643ea8Slogwang _argvec[2] = (unsigned long)(arg2); \
6064*a9643ea8Slogwang _argvec[3] = (unsigned long)(arg3); \
6065*a9643ea8Slogwang _argvec[4] = (unsigned long)(arg4); \
6066*a9643ea8Slogwang _argvec[5] = (unsigned long)(arg5); \
6067*a9643ea8Slogwang _argvec[6] = (unsigned long)(arg6); \
6068*a9643ea8Slogwang _argvec[7] = (unsigned long)(arg7); \
6069*a9643ea8Slogwang _argvec[8] = (unsigned long)(arg8); \
6070*a9643ea8Slogwang _argvec[9] = (unsigned long)(arg9); \
6071*a9643ea8Slogwang _argvec[10] = (unsigned long)(arg10); \
6072*a9643ea8Slogwang _argvec[11] = (unsigned long)(arg11); \
6073*a9643ea8Slogwang _argvec[12] = (unsigned long)(arg12); \
6074*a9643ea8Slogwang __asm__ volatile( \
6075*a9643ea8Slogwang "dsubu $29, $29, 32\n\t" \
6076*a9643ea8Slogwang "ld $4, 72(%1)\n\t" \
6077*a9643ea8Slogwang "sd $4, 0($29)\n\t" \
6078*a9643ea8Slogwang "ld $4, 80(%1)\n\t" \
6079*a9643ea8Slogwang "sd $4, 8($29)\n\t" \
6080*a9643ea8Slogwang "ld $4, 88(%1)\n\t" \
6081*a9643ea8Slogwang "sd $4, 16($29)\n\t" \
6082*a9643ea8Slogwang "ld $4, 96(%1)\n\t" \
6083*a9643ea8Slogwang "sd $4, 24($29)\n\t" \
6084*a9643ea8Slogwang "ld $4, 8(%1)\n\t" \
6085*a9643ea8Slogwang "ld $5, 16(%1)\n\t" \
6086*a9643ea8Slogwang "ld $6, 24(%1)\n\t" \
6087*a9643ea8Slogwang "ld $7, 32(%1)\n\t" \
6088*a9643ea8Slogwang "ld $8, 40(%1)\n\t" \
6089*a9643ea8Slogwang "ld $9, 48(%1)\n\t" \
6090*a9643ea8Slogwang "ld $10, 56(%1)\n\t" \
6091*a9643ea8Slogwang "ld $11, 64(%1)\n\t" \
6092*a9643ea8Slogwang "ld $25, 0(%1)\n\t" /* target->t9 */ \
6093*a9643ea8Slogwang VALGRIND_CALL_NOREDIR_T9 \
6094*a9643ea8Slogwang "daddu $29, $29, 32\n\t" \
6095*a9643ea8Slogwang "move %0, $2\n" \
6096*a9643ea8Slogwang : /*out*/ "=r" (_res) \
6097*a9643ea8Slogwang : /*in*/ "r" (&_argvec[0]) \
6098*a9643ea8Slogwang : /*trash*/ "memory", __CALLER_SAVED_REGS \
6099*a9643ea8Slogwang ); \
6100*a9643ea8Slogwang lval = (__typeof__(lval)) _res; \
6101*a9643ea8Slogwang } while (0)
6102*a9643ea8Slogwang
6103*a9643ea8Slogwang #endif /* PLAT_mips64_linux */
6104*a9643ea8Slogwang
6105*a9643ea8Slogwang
6106*a9643ea8Slogwang /* ------------------------------------------------------------------ */
6107*a9643ea8Slogwang /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
6108*a9643ea8Slogwang /* */
6109*a9643ea8Slogwang /* ------------------------------------------------------------------ */
6110*a9643ea8Slogwang
6111*a9643ea8Slogwang /* Some request codes. There are many more of these, but most are not
6112*a9643ea8Slogwang exposed to end-user view. These are the public ones, all of the
6113*a9643ea8Slogwang form 0x1000 + small_number.
6114*a9643ea8Slogwang
6115*a9643ea8Slogwang Core ones are in the range 0x00000000--0x0000ffff. The non-public
6116*a9643ea8Slogwang ones start at 0x2000.
6117*a9643ea8Slogwang */
6118*a9643ea8Slogwang
6119*a9643ea8Slogwang /* These macros are used by tools -- they must be public, but don't
6120*a9643ea8Slogwang embed them into other programs. */
6121*a9643ea8Slogwang #define VG_USERREQ_TOOL_BASE(a,b) \
6122*a9643ea8Slogwang ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
6123*a9643ea8Slogwang #define VG_IS_TOOL_USERREQ(a, b, v) \
6124*a9643ea8Slogwang (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
6125*a9643ea8Slogwang
6126*a9643ea8Slogwang /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
6127*a9643ea8Slogwang This enum comprises an ABI exported by Valgrind to programs
6128*a9643ea8Slogwang which use client requests. DO NOT CHANGE THE ORDER OF THESE
6129*a9643ea8Slogwang ENTRIES, NOR DELETE ANY -- add new ones at the end. */
6130*a9643ea8Slogwang typedef
6131*a9643ea8Slogwang enum { VG_USERREQ__RUNNING_ON_VALGRIND = 0x1001,
6132*a9643ea8Slogwang VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,
6133*a9643ea8Slogwang
6134*a9643ea8Slogwang /* These allow any function to be called from the simulated
6135*a9643ea8Slogwang CPU but run on the real CPU. Nb: the first arg passed to
6136*a9643ea8Slogwang the function is always the ThreadId of the running
6137*a9643ea8Slogwang thread! So CLIENT_CALL0 actually requires a 1 arg
6138*a9643ea8Slogwang function, etc. */
6139*a9643ea8Slogwang VG_USERREQ__CLIENT_CALL0 = 0x1101,
6140*a9643ea8Slogwang VG_USERREQ__CLIENT_CALL1 = 0x1102,
6141*a9643ea8Slogwang VG_USERREQ__CLIENT_CALL2 = 0x1103,
6142*a9643ea8Slogwang VG_USERREQ__CLIENT_CALL3 = 0x1104,
6143*a9643ea8Slogwang
6144*a9643ea8Slogwang /* Can be useful in regression testing suites -- eg. can
6145*a9643ea8Slogwang send Valgrind's output to /dev/null and still count
6146*a9643ea8Slogwang errors. */
6147*a9643ea8Slogwang VG_USERREQ__COUNT_ERRORS = 0x1201,
6148*a9643ea8Slogwang
6149*a9643ea8Slogwang /* Allows the client program and/or gdbserver to execute a monitor
6150*a9643ea8Slogwang command. */
6151*a9643ea8Slogwang VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,
6152*a9643ea8Slogwang
6153*a9643ea8Slogwang /* These are useful and can be interpreted by any tool that
6154*a9643ea8Slogwang tracks malloc() et al, by using vg_replace_malloc.c. */
6155*a9643ea8Slogwang VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
6156*a9643ea8Slogwang VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
6157*a9643ea8Slogwang VG_USERREQ__FREELIKE_BLOCK = 0x1302,
6158*a9643ea8Slogwang /* Memory pool support. */
6159*a9643ea8Slogwang VG_USERREQ__CREATE_MEMPOOL = 0x1303,
6160*a9643ea8Slogwang VG_USERREQ__DESTROY_MEMPOOL = 0x1304,
6161*a9643ea8Slogwang VG_USERREQ__MEMPOOL_ALLOC = 0x1305,
6162*a9643ea8Slogwang VG_USERREQ__MEMPOOL_FREE = 0x1306,
6163*a9643ea8Slogwang VG_USERREQ__MEMPOOL_TRIM = 0x1307,
6164*a9643ea8Slogwang VG_USERREQ__MOVE_MEMPOOL = 0x1308,
6165*a9643ea8Slogwang VG_USERREQ__MEMPOOL_CHANGE = 0x1309,
6166*a9643ea8Slogwang VG_USERREQ__MEMPOOL_EXISTS = 0x130a,
6167*a9643ea8Slogwang
6168*a9643ea8Slogwang /* Allow printfs to valgrind log. */
6169*a9643ea8Slogwang /* The first two pass the va_list argument by value, which
6170*a9643ea8Slogwang assumes it is the same size as or smaller than a UWord,
6171*a9643ea8Slogwang which generally isn't the case. Hence are deprecated.
6172*a9643ea8Slogwang The second two pass the vargs by reference and so are
6173*a9643ea8Slogwang immune to this problem. */
6174*a9643ea8Slogwang /* both :: char* fmt, va_list vargs (DEPRECATED) */
6175*a9643ea8Slogwang VG_USERREQ__PRINTF = 0x1401,
6176*a9643ea8Slogwang VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
6177*a9643ea8Slogwang /* both :: char* fmt, va_list* vargs */
6178*a9643ea8Slogwang VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
6179*a9643ea8Slogwang VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,
6180*a9643ea8Slogwang
6181*a9643ea8Slogwang /* Stack support. */
6182*a9643ea8Slogwang VG_USERREQ__STACK_REGISTER = 0x1501,
6183*a9643ea8Slogwang VG_USERREQ__STACK_DEREGISTER = 0x1502,
6184*a9643ea8Slogwang VG_USERREQ__STACK_CHANGE = 0x1503,
6185*a9643ea8Slogwang
6186*a9643ea8Slogwang /* Wine support */
6187*a9643ea8Slogwang VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,
6188*a9643ea8Slogwang
6189*a9643ea8Slogwang /* Querying of debug info. */
6190*a9643ea8Slogwang VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,
6191*a9643ea8Slogwang
6192*a9643ea8Slogwang /* Disable/enable error reporting level. Takes a single
6193*a9643ea8Slogwang Word arg which is the delta to this thread's error
6194*a9643ea8Slogwang disablement indicator. Hence 1 disables or further
6195*a9643ea8Slogwang disables errors, and -1 moves back towards enablement.
6196*a9643ea8Slogwang Other values are not allowed. */
6197*a9643ea8Slogwang VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,
6198*a9643ea8Slogwang
6199*a9643ea8Slogwang /* Initialise IR injection */
6200*a9643ea8Slogwang VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901
6201*a9643ea8Slogwang } Vg_ClientRequest;
6202*a9643ea8Slogwang
6203*a9643ea8Slogwang #if !defined(__GNUC__)
6204*a9643ea8Slogwang # define __extension__ /* */
6205*a9643ea8Slogwang #endif
6206*a9643ea8Slogwang
6207*a9643ea8Slogwang
6208*a9643ea8Slogwang /* Returns the number of Valgrinds this code is running under. That
6209*a9643ea8Slogwang is, 0 if running natively, 1 if running under Valgrind, 2 if
6210*a9643ea8Slogwang running under Valgrind which is running under another Valgrind,
6211*a9643ea8Slogwang etc. */
6212*a9643ea8Slogwang #define RUNNING_ON_VALGRIND \
6213*a9643ea8Slogwang (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */, \
6214*a9643ea8Slogwang VG_USERREQ__RUNNING_ON_VALGRIND, \
6215*a9643ea8Slogwang 0, 0, 0, 0, 0) \
6216*a9643ea8Slogwang
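/* Illustrative usage sketch, not part of the Valgrind API itself.  It
   simply reports the nesting depth; everything shown is standard C:

      #include <stdio.h>
      #include "valgrind.h"

      int main(void)
      {
         // 0 = native, 1 = under Valgrind, 2 = Valgrind-on-Valgrind, ...
         unsigned depth = RUNNING_ON_VALGRIND;
         printf("Valgrind nesting depth: %u\n", depth);
         return 0;
      }
*/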
6217*a9643ea8Slogwang
6218*a9643ea8Slogwang /* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
6219*a9643ea8Slogwang _qzz_len - 1]. Useful if you are debugging a JITter or some such,
6220*a9643ea8Slogwang since it provides a way to make sure valgrind will retranslate the
6221*a9643ea8Slogwang invalidated area. Returns no value. */
6222*a9643ea8Slogwang #define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
6223*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
6224*a9643ea8Slogwang _qzz_addr, _qzz_len, 0, 0, 0)
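
/* Illustrative JIT-style sketch.  code_buf and regenerate_code() are
   hypothetical names, not part of this header:

      static unsigned char code_buf[4096];

      void my_recompile(void)
      {
         unsigned long n = regenerate_code(code_buf, sizeof code_buf);
         // The old translations of [code_buf, code_buf + n) are now stale.
         VALGRIND_DISCARD_TRANSLATIONS(code_buf, n);
      }
*/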
6225*a9643ea8Slogwang
6226*a9643ea8Slogwang
6227*a9643ea8Slogwang /* These requests are for getting Valgrind itself to print something.
6228*a9643ea8Slogwang Possibly with a backtrace. This is a really ugly hack. The return value
6229*a9643ea8Slogwang is the number of characters printed, excluding the "**<pid>** " part at the
6230*a9643ea8Slogwang start and the backtrace (if present). */
6231*a9643ea8Slogwang
6232*a9643ea8Slogwang #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6233*a9643ea8Slogwang /* Modern GCC will optimize the static routine out if unused,
6234*a9643ea8Slogwang and unused attribute will shut down warnings about it. */
6235*a9643ea8Slogwang static int VALGRIND_PRINTF(const char *format, ...)
6236*a9643ea8Slogwang __attribute__((format(__printf__, 1, 2), __unused__));
6237*a9643ea8Slogwang #endif
6238*a9643ea8Slogwang static int
6239*a9643ea8Slogwang #if defined(_MSC_VER)
6240*a9643ea8Slogwang __inline
6241*a9643ea8Slogwang #endif
6242*a9643ea8Slogwang VALGRIND_PRINTF(const char *format, ...)
6243*a9643ea8Slogwang {
6244*a9643ea8Slogwang #if defined(NVALGRIND)
6245*a9643ea8Slogwang return 0;
6246*a9643ea8Slogwang #else /* NVALGRIND */
6247*a9643ea8Slogwang #if defined(_MSC_VER) || defined(__MINGW64__)
6248*a9643ea8Slogwang uintptr_t _qzz_res;
6249*a9643ea8Slogwang #else
6250*a9643ea8Slogwang unsigned long _qzz_res;
6251*a9643ea8Slogwang #endif
6252*a9643ea8Slogwang va_list vargs;
6253*a9643ea8Slogwang va_start(vargs, format);
6254*a9643ea8Slogwang #if defined(_MSC_VER) || defined(__MINGW64__)
6255*a9643ea8Slogwang _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6256*a9643ea8Slogwang VG_USERREQ__PRINTF_VALIST_BY_REF,
6257*a9643ea8Slogwang (uintptr_t)format,
6258*a9643ea8Slogwang (uintptr_t)&vargs,
6259*a9643ea8Slogwang 0, 0, 0);
6260*a9643ea8Slogwang #else
6261*a9643ea8Slogwang _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6262*a9643ea8Slogwang VG_USERREQ__PRINTF_VALIST_BY_REF,
6263*a9643ea8Slogwang (unsigned long)format,
6264*a9643ea8Slogwang (unsigned long)&vargs,
6265*a9643ea8Slogwang 0, 0, 0);
6266*a9643ea8Slogwang #endif
6267*a9643ea8Slogwang va_end(vargs);
6268*a9643ea8Slogwang return (int)_qzz_res;
6269*a9643ea8Slogwang #endif /* NVALGRIND */
6270*a9643ea8Slogwang }
6271*a9643ea8Slogwang
6272*a9643ea8Slogwang #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6273*a9643ea8Slogwang static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6274*a9643ea8Slogwang __attribute__((format(__printf__, 1, 2), __unused__));
6275*a9643ea8Slogwang #endif
6276*a9643ea8Slogwang static int
6277*a9643ea8Slogwang #if defined(_MSC_VER)
6278*a9643ea8Slogwang __inline
6279*a9643ea8Slogwang #endif
6280*a9643ea8Slogwang VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6281*a9643ea8Slogwang {
6282*a9643ea8Slogwang #if defined(NVALGRIND)
6283*a9643ea8Slogwang return 0;
6284*a9643ea8Slogwang #else /* NVALGRIND */
6285*a9643ea8Slogwang #if defined(_MSC_VER) || defined(__MINGW64__)
6286*a9643ea8Slogwang uintptr_t _qzz_res;
6287*a9643ea8Slogwang #else
6288*a9643ea8Slogwang unsigned long _qzz_res;
6289*a9643ea8Slogwang #endif
6290*a9643ea8Slogwang va_list vargs;
6291*a9643ea8Slogwang va_start(vargs, format);
6292*a9643ea8Slogwang #if defined(_MSC_VER) || defined(__MINGW64__)
6293*a9643ea8Slogwang _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6294*a9643ea8Slogwang VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6295*a9643ea8Slogwang (uintptr_t)format,
6296*a9643ea8Slogwang (uintptr_t)&vargs,
6297*a9643ea8Slogwang 0, 0, 0);
6298*a9643ea8Slogwang #else
6299*a9643ea8Slogwang _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6300*a9643ea8Slogwang VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6301*a9643ea8Slogwang (unsigned long)format,
6302*a9643ea8Slogwang (unsigned long)&vargs,
6303*a9643ea8Slogwang 0, 0, 0);
6304*a9643ea8Slogwang #endif
6305*a9643ea8Slogwang va_end(vargs);
6306*a9643ea8Slogwang return (int)_qzz_res;
6307*a9643ea8Slogwang #endif /* NVALGRIND */
6308*a9643ea8Slogwang }
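
/* Illustrative sketch of the two requests above; nplugins and state are
   hypothetical variables.  Both calls print into Valgrind's log when run
   under Valgrind, and simply return 0 when run natively or when NVALGRIND
   is defined:

      int n = VALGRIND_PRINTF("loaded %d plugins\n", nplugins);
      VALGRIND_PRINTF_BACKTRACE("unexpected state %d here:\n", state);
*/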
6309*a9643ea8Slogwang
6310*a9643ea8Slogwang
6311*a9643ea8Slogwang /* These requests allow control to move from the simulated CPU to the
6312*a9643ea8Slogwang    real CPU, calling an arbitrary function.
6313*a9643ea8Slogwang
6314*a9643ea8Slogwang Note that the current ThreadId is inserted as the first argument.
6315*a9643ea8Slogwang So this call:
6316*a9643ea8Slogwang
6317*a9643ea8Slogwang VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
6318*a9643ea8Slogwang
6319*a9643ea8Slogwang requires f to have this signature:
6320*a9643ea8Slogwang
6321*a9643ea8Slogwang Word f(Word tid, Word arg1, Word arg2)
6322*a9643ea8Slogwang
6323*a9643ea8Slogwang where "Word" is a word-sized type.
6324*a9643ea8Slogwang
6325*a9643ea8Slogwang Note that these client requests are not entirely reliable. For example,
6326*a9643ea8Slogwang if you call a function with them that subsequently calls printf(),
6327*a9643ea8Slogwang there's a high chance Valgrind will crash. Generally, your prospects of
6328*a9643ea8Slogwang these working are made higher if the called function does not refer to
6329*a9643ea8Slogwang any global variables, and does not refer to any libc or other functions
6330*a9643ea8Slogwang (printf et al). Any kind of entanglement with libc or dynamic linking is
6331*a9643ea8Slogwang likely to have a bad outcome, for tricky reasons which we've grappled
6332*a9643ea8Slogwang with a lot in the past.
6333*a9643ea8Slogwang */
6334*a9643ea8Slogwang #define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
6335*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6336*a9643ea8Slogwang VG_USERREQ__CLIENT_CALL0, \
6337*a9643ea8Slogwang _qyy_fn, \
6338*a9643ea8Slogwang 0, 0, 0, 0)
6339*a9643ea8Slogwang
6340*a9643ea8Slogwang #define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
6341*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6342*a9643ea8Slogwang VG_USERREQ__CLIENT_CALL1, \
6343*a9643ea8Slogwang _qyy_fn, \
6344*a9643ea8Slogwang _qyy_arg1, 0, 0, 0)
6345*a9643ea8Slogwang
6346*a9643ea8Slogwang #define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
6347*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6348*a9643ea8Slogwang VG_USERREQ__CLIENT_CALL2, \
6349*a9643ea8Slogwang _qyy_fn, \
6350*a9643ea8Slogwang _qyy_arg1, _qyy_arg2, 0, 0)
6351*a9643ea8Slogwang
6352*a9643ea8Slogwang #define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
6353*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6354*a9643ea8Slogwang VG_USERREQ__CLIENT_CALL3, \
6355*a9643ea8Slogwang _qyy_fn, \
6356*a9643ea8Slogwang _qyy_arg1, _qyy_arg2, \
6357*a9643ea8Slogwang _qyy_arg3, 0)
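
/* Illustrative sketch of the required signature; my_sum() is hypothetical
   and plain "long" stands in for the word-sized Word type:

      static long my_sum(long tid, long a, long b)
      {
         (void)tid;                // ThreadId inserted by Valgrind
         return a + b;
      }

      long r = VALGRIND_NON_SIMD_CALL2(my_sum, 40, 2);
      // r == 42 under Valgrind; run natively the macro yields the default 0.
*/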
6358*a9643ea8Slogwang
6359*a9643ea8Slogwang
6360*a9643ea8Slogwang /* Counts the number of errors that have been recorded by a tool. Nb:
6361*a9643ea8Slogwang the tool must record the errors with VG_(maybe_record_error)() or
6362*a9643ea8Slogwang VG_(unique_error)() for them to be counted. */
6363*a9643ea8Slogwang #define VALGRIND_COUNT_ERRORS \
6364*a9643ea8Slogwang (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \
6365*a9643ea8Slogwang 0 /* default return */, \
6366*a9643ea8Slogwang VG_USERREQ__COUNT_ERRORS, \
6367*a9643ea8Slogwang 0, 0, 0, 0, 0)
6368*a9643ea8Slogwang
6369*a9643ea8Slogwang /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
6370*a9643ea8Slogwang when heap blocks are allocated in order to give accurate results. This
6371*a9643ea8Slogwang happens automatically for the standard allocator functions such as
6372*a9643ea8Slogwang malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
6373*a9643ea8Slogwang delete[], etc.
6374*a9643ea8Slogwang
6375*a9643ea8Slogwang But if your program uses a custom allocator, this doesn't automatically
6376*a9643ea8Slogwang happen, and Valgrind will not do as well. For example, if you allocate
6377*a9643ea8Slogwang    superblocks with mmap() and then allocate chunks of the superblocks, all
6378*a9643ea8Slogwang Valgrind's observations will be at the mmap() level and it won't know that
6379*a9643ea8Slogwang the chunks should be considered separate entities. In Memcheck's case,
6380*a9643ea8Slogwang that means you probably won't get heap block overrun detection (because
6381*a9643ea8Slogwang there won't be redzones marked as unaddressable) and you definitely won't
6382*a9643ea8Slogwang get any leak detection.
6383*a9643ea8Slogwang
6384*a9643ea8Slogwang The following client requests allow a custom allocator to be annotated so
6385*a9643ea8Slogwang that it can be handled accurately by Valgrind.
6386*a9643ea8Slogwang
6387*a9643ea8Slogwang VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
6388*a9643ea8Slogwang by a malloc()-like function. For Memcheck (an illustrative case), this
6389*a9643ea8Slogwang does two things:
6390*a9643ea8Slogwang
6391*a9643ea8Slogwang - It records that the block has been allocated. This means any addresses
6392*a9643ea8Slogwang within the block mentioned in error messages will be
6393*a9643ea8Slogwang identified as belonging to the block. It also means that if the block
6394*a9643ea8Slogwang isn't freed it will be detected by the leak checker.
6395*a9643ea8Slogwang
6396*a9643ea8Slogwang - It marks the block as being addressable and undefined (if 'is_zeroed' is
6397*a9643ea8Slogwang not set), or addressable and defined (if 'is_zeroed' is set). This
6398*a9643ea8Slogwang controls how accesses to the block by the program are handled.
6399*a9643ea8Slogwang
6400*a9643ea8Slogwang 'addr' is the start of the usable block (ie. after any
6401*a9643ea8Slogwang redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
6402*a9643ea8Slogwang can apply redzones -- these are blocks of padding at the start and end of
6403*a9643ea8Slogwang each block. Adding redzones is recommended as it makes it much more likely
6404*a9643ea8Slogwang Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
6405*a9643ea8Slogwang zeroed (or filled with another predictable value), as is the case for
6406*a9643ea8Slogwang calloc().
6407*a9643ea8Slogwang
6408*a9643ea8Slogwang VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
6409*a9643ea8Slogwang heap block -- that will be used by the client program -- is allocated.
6410*a9643ea8Slogwang It's best to put it at the outermost level of the allocator if possible;
6411*a9643ea8Slogwang for example, if you have a function my_alloc() which calls
6412*a9643ea8Slogwang internal_alloc(), and the client request is put inside internal_alloc(),
6413*a9643ea8Slogwang stack traces relating to the heap block will contain entries for both
6414*a9643ea8Slogwang my_alloc() and internal_alloc(), which is probably not what you want.
6415*a9643ea8Slogwang
6416*a9643ea8Slogwang For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
6417*a9643ea8Slogwang custom blocks from within a heap block, B, that has been allocated with
6418*a9643ea8Slogwang malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
6419*a9643ea8Slogwang -- the custom blocks will take precedence.
6420*a9643ea8Slogwang
6421*a9643ea8Slogwang VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
6422*a9643ea8Slogwang Memcheck, it does two things:
6423*a9643ea8Slogwang
6424*a9643ea8Slogwang - It records that the block has been deallocated. This assumes that the
6425*a9643ea8Slogwang block was annotated as having been allocated via
6426*a9643ea8Slogwang VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6427*a9643ea8Slogwang
6428*a9643ea8Slogwang - It marks the block as being unaddressable.
6429*a9643ea8Slogwang
6430*a9643ea8Slogwang VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
6431*a9643ea8Slogwang heap block is deallocated.
6432*a9643ea8Slogwang
6433*a9643ea8Slogwang VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
6434*a9643ea8Slogwang Memcheck, it does four things:
6435*a9643ea8Slogwang
6436*a9643ea8Slogwang - It records that the size of a block has been changed. This assumes that
6437*a9643ea8Slogwang the block was annotated as having been allocated via
6438*a9643ea8Slogwang VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6439*a9643ea8Slogwang
6440*a9643ea8Slogwang    - If the block shrank, it marks the freed memory as being unaddressable.
6441*a9643ea8Slogwang
6442*a9643ea8Slogwang - If the block grew, it marks the new area as undefined and defines a red
6443*a9643ea8Slogwang zone past the end of the new block.
6444*a9643ea8Slogwang
6445*a9643ea8Slogwang - The V-bits of the overlap between the old and the new block are preserved.
6446*a9643ea8Slogwang
6447*a9643ea8Slogwang VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
6448*a9643ea8Slogwang and before deallocation of the old block.
6449*a9643ea8Slogwang
6450*a9643ea8Slogwang In many cases, these three client requests will not be enough to get your
6451*a9643ea8Slogwang allocator working well with Memcheck. More specifically, if your allocator
6452*a9643ea8Slogwang writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
6453*a9643ea8Slogwang will be necessary to mark the memory as addressable just before the zeroing
6454*a9643ea8Slogwang occurs, otherwise you'll get a lot of invalid write errors. For example,
6455*a9643ea8Slogwang you'll need to do this if your allocator recycles freed blocks, but it
6456*a9643ea8Slogwang zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
6457*a9643ea8Slogwang Alternatively, if your allocator reuses freed blocks for allocator-internal
6458*a9643ea8Slogwang data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
6459*a9643ea8Slogwang
6460*a9643ea8Slogwang Really, what's happening is a blurring of the lines between the client
6461*a9643ea8Slogwang program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
6462*a9643ea8Slogwang memory should be considered unaddressable to the client program, but the
6463*a9643ea8Slogwang allocator knows more than the rest of the client program and so may be able
6464*a9643ea8Slogwang to safely access it. Extra client requests are necessary for Valgrind to
6465*a9643ea8Slogwang understand the distinction between the allocator and the rest of the
6466*a9643ea8Slogwang program.
6467*a9643ea8Slogwang
6468*a9643ea8Slogwang Ignored if addr == 0.
6469*a9643ea8Slogwang */
6470*a9643ea8Slogwang #define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \
6471*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
6472*a9643ea8Slogwang addr, sizeB, rzB, is_zeroed, 0)
6473*a9643ea8Slogwang
6474*a9643ea8Slogwang /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
6475*a9643ea8Slogwang Ignored if addr == 0.
6476*a9643ea8Slogwang */
6477*a9643ea8Slogwang #define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
6478*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
6479*a9643ea8Slogwang addr, oldSizeB, newSizeB, rzB, 0)
6480*a9643ea8Slogwang
6481*a9643ea8Slogwang /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
6482*a9643ea8Slogwang Ignored if addr == 0.
6483*a9643ea8Slogwang */
6484*a9643ea8Slogwang #define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
6485*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK, \
6486*a9643ea8Slogwang addr, rzB, 0, 0, 0)
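
/* Illustrative custom-allocator sketch.  arena_carve(), arena_release()
   and the 16-byte redzone are hypothetical choices, not Valgrind API:

      void* my_arena_alloc(size_t n)
      {
         char* raw = arena_carve(n + 2*16);        // room for both redzones
         char* p   = raw + 16;                     // usable block starts here
         VALGRIND_MALLOCLIKE_BLOCK(p, n, 16, 0);   // rzB = 16, is_zeroed = 0
         return p;
      }

      void my_arena_free(void* p)
      {
         VALGRIND_FREELIKE_BLOCK(p, 16);
         arena_release((char*)p - 16);
      }
*/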
6487*a9643ea8Slogwang
6488*a9643ea8Slogwang /* Create a memory pool. */
6489*a9643ea8Slogwang #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
6490*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
6491*a9643ea8Slogwang pool, rzB, is_zeroed, 0, 0)
6492*a9643ea8Slogwang
6493*a9643ea8Slogwang /* Destroy a memory pool. */
6494*a9643ea8Slogwang #define VALGRIND_DESTROY_MEMPOOL(pool) \
6495*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL, \
6496*a9643ea8Slogwang pool, 0, 0, 0, 0)
6497*a9643ea8Slogwang
6498*a9643ea8Slogwang /* Associate a piece of memory with a memory pool. */
6499*a9643ea8Slogwang #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
6500*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC, \
6501*a9643ea8Slogwang pool, addr, size, 0, 0)
6502*a9643ea8Slogwang
6503*a9643ea8Slogwang /* Disassociate a piece of memory from a memory pool. */
6504*a9643ea8Slogwang #define VALGRIND_MEMPOOL_FREE(pool, addr) \
6505*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE, \
6506*a9643ea8Slogwang pool, addr, 0, 0, 0)
6507*a9643ea8Slogwang
6508*a9643ea8Slogwang /* Disassociate any pieces outside a particular range. */
6509*a9643ea8Slogwang #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
6510*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM, \
6511*a9643ea8Slogwang pool, addr, size, 0, 0)
6512*a9643ea8Slogwang
6513*a9643ea8Slogwang /* Move a memory pool from anchor address poolA to anchor address poolB. */
6514*a9643ea8Slogwang #define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \
6515*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL, \
6516*a9643ea8Slogwang poolA, poolB, 0, 0, 0)
6517*a9643ea8Slogwang
6518*a9643ea8Slogwang /* Resize and/or move a piece associated with a memory pool. */
6519*a9643ea8Slogwang #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
6520*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE, \
6521*a9643ea8Slogwang pool, addrA, addrB, size, 0)
6522*a9643ea8Slogwang
6523*a9643ea8Slogwang /* Return 1 if a mempool exists, else 0. */
6524*a9643ea8Slogwang #define VALGRIND_MEMPOOL_EXISTS(pool) \
6525*a9643ea8Slogwang (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
6526*a9643ea8Slogwang VG_USERREQ__MEMPOOL_EXISTS, \
6527*a9643ea8Slogwang pool, 0, 0, 0, 0)
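
/* Illustrative mempool lifecycle sketch; pool_base, the chunk layout and
   the sizes are hypothetical:

      static char pool_base[65536];                  // pool anchor address
      VALGRIND_CREATE_MEMPOOL(pool_base, 0, 0);      // no redzone, not zeroed

      char* chunk = pool_base + 128;
      VALGRIND_MEMPOOL_ALLOC(pool_base, chunk, 256); // hand out 256 bytes
      // ... use chunk ...
      VALGRIND_MEMPOOL_FREE(pool_base, chunk);
      VALGRIND_DESTROY_MEMPOOL(pool_base);
*/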
6528*a9643ea8Slogwang
6529*a9643ea8Slogwang /* Mark a piece of memory as being a stack. Returns a stack id.
6530*a9643ea8Slogwang start is the lowest addressable stack byte, end is the highest
6531*a9643ea8Slogwang addressable stack byte. */
6532*a9643ea8Slogwang #define VALGRIND_STACK_REGISTER(start, end) \
6533*a9643ea8Slogwang (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
6534*a9643ea8Slogwang VG_USERREQ__STACK_REGISTER, \
6535*a9643ea8Slogwang start, end, 0, 0, 0)
6536*a9643ea8Slogwang
6537*a9643ea8Slogwang /* Unmark the piece of memory associated with a stack id as being a
6538*a9643ea8Slogwang stack. */
6539*a9643ea8Slogwang #define VALGRIND_STACK_DEREGISTER(id) \
6540*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
6541*a9643ea8Slogwang id, 0, 0, 0, 0)
6542*a9643ea8Slogwang
6543*a9643ea8Slogwang /* Change the start and end address of the stack id.
6544*a9643ea8Slogwang start is the new lowest addressable stack byte, end is the new highest
6545*a9643ea8Slogwang addressable stack byte. */
6546*a9643ea8Slogwang #define VALGRIND_STACK_CHANGE(id, start, end) \
6547*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE, \
6548*a9643ea8Slogwang id, start, end, 0, 0)
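
/* Illustrative sketch for a user-managed stack (e.g. one handed to
   makecontext() or a coroutine library); co_stack and its size are
   hypothetical:

      static char co_stack[65536];
      unsigned co_stack_id =
         VALGRIND_STACK_REGISTER(co_stack,                        // lowest byte
                                 co_stack + sizeof co_stack - 1); // highest byte
      // ... run code on co_stack ...
      VALGRIND_STACK_DEREGISTER(co_stack_id);
*/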
6549*a9643ea8Slogwang
6550*a9643ea8Slogwang /* Load PDB debug info for Wine PE image_map. */
6551*a9643ea8Slogwang #define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \
6552*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
6553*a9643ea8Slogwang fd, ptr, total_size, delta, 0)
6554*a9643ea8Slogwang
6555*a9643ea8Slogwang /* Map a code address to a source file name and line number. buf64
6556*a9643ea8Slogwang must point to a 64-byte buffer in the caller's address space. The
6557*a9643ea8Slogwang result will be dumped in there and is guaranteed to be zero
6558*a9643ea8Slogwang terminated. If no info is found, the first byte is set to zero. */
6559*a9643ea8Slogwang #define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \
6560*a9643ea8Slogwang (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
6561*a9643ea8Slogwang VG_USERREQ__MAP_IP_TO_SRCLOC, \
6562*a9643ea8Slogwang addr, buf64, 0, 0, 0)
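
/* Illustrative sketch; it assumes a GCC/Clang-style compiler for
   __builtin_return_address():

      char buf64[64];
      VALGRIND_MAP_IP_TO_SRCLOC(__builtin_return_address(0), buf64);
      if (buf64[0] != 0)
         VALGRIND_PRINTF("called from %s\n", buf64);
*/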
6563*a9643ea8Slogwang
6564*a9643ea8Slogwang /* Disable error reporting for this thread. Behaves in a stack like
6565*a9643ea8Slogwang way, so you can safely call this multiple times provided that
6566*a9643ea8Slogwang VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
6567*a9643ea8Slogwang to re-enable reporting. The first call of this macro disables
6568*a9643ea8Slogwang reporting. Subsequent calls have no effect except to increase the
6569*a9643ea8Slogwang number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
6570*a9643ea8Slogwang reporting. Child threads do not inherit this setting from their
6571*a9643ea8Slogwang parents -- they are always created with reporting enabled. */
6572*a9643ea8Slogwang #define VALGRIND_DISABLE_ERROR_REPORTING \
6573*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
6574*a9643ea8Slogwang 1, 0, 0, 0, 0)
6575*a9643ea8Slogwang
6576*a9643ea8Slogwang /* Re-enable error reporting, as per comments on
6577*a9643ea8Slogwang VALGRIND_DISABLE_ERROR_REPORTING. */
6578*a9643ea8Slogwang #define VALGRIND_ENABLE_ERROR_REPORTING \
6579*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
6580*a9643ea8Slogwang -1, 0, 0, 0, 0)
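
/* Illustrative sketch: silence reports around code the program knows to be
   noisy; scan_uninitialised_scratch() is hypothetical.  The two calls must
   stay balanced:

      VALGRIND_DISABLE_ERROR_REPORTING;
      scan_uninitialised_scratch();
      VALGRIND_ENABLE_ERROR_REPORTING;
*/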
6581*a9643ea8Slogwang
6582*a9643ea8Slogwang /* Execute a monitor command from the client program.
6583*a9643ea8Slogwang If a connection is opened with GDB, the output will be sent
6584*a9643ea8Slogwang according to the output mode set for vgdb.
6585*a9643ea8Slogwang If no connection is opened, output will go to the log output.
6586*a9643ea8Slogwang Returns 1 if command not recognised, 0 otherwise. */
6587*a9643ea8Slogwang #define VALGRIND_MONITOR_COMMAND(command) \
6588*a9643ea8Slogwang VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
6589*a9643ea8Slogwang command, 0, 0, 0, 0)
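
/* Illustrative sketch.  "v.info n_errs_found" is believed to be a core
   monitor command in current Valgrind releases; substitute any command
   your tool accepts:

      int unrecognised = VALGRIND_MONITOR_COMMAND("v.info n_errs_found");
      if (unrecognised)
         VALGRIND_PRINTF("monitor command not recognised\n");
*/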
6590*a9643ea8Slogwang
6591*a9643ea8Slogwang
6592*a9643ea8Slogwang #undef PLAT_x86_darwin
6593*a9643ea8Slogwang #undef PLAT_amd64_darwin
6594*a9643ea8Slogwang #undef PLAT_x86_win32
6595*a9643ea8Slogwang #undef PLAT_amd64_win64
6596*a9643ea8Slogwang #undef PLAT_x86_linux
6597*a9643ea8Slogwang #undef PLAT_amd64_linux
6598*a9643ea8Slogwang #undef PLAT_ppc32_linux
6599*a9643ea8Slogwang #undef PLAT_ppc64be_linux
6600*a9643ea8Slogwang #undef PLAT_ppc64le_linux
6601*a9643ea8Slogwang #undef PLAT_arm_linux
6602*a9643ea8Slogwang #undef PLAT_s390x_linux
6603*a9643ea8Slogwang #undef PLAT_mips32_linux
6604*a9643ea8Slogwang #undef PLAT_mips64_linux
6605*a9643ea8Slogwang
6606*a9643ea8Slogwang #endif /* __VALGRIND_H */
6607