/* Version of Valgrind (major.minor) that this header came from.
   Client code can use these to cope with client-request ABI
   differences between Valgrind releases. */
#define __VALGRIND_MAJOR__ 3
#define __VALGRIND_MINOR__ 8
/* Clear any previously-selected platform macros, so the detection
   logic below defines exactly one of them from a clean slate. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
/* Decide which platform we are being compiled for, based on
   compiler-predefined architecture/OS macros, and define the
   corresponding PLAT_* symbol used to select the asm variants below. */
#if defined(__APPLE__) && defined(__i386__)
#define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#define PLAT_amd64_darwin 1
#elif defined(__MINGW32__) || defined(__CYGWIN32__) || (defined(_WIN32) && defined(_M_IX86))
#define PLAT_x86_win32 1
#elif defined(__linux__) && defined(__i386__)
#define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__)
#define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__)
#define PLAT_ppc64_linux 1
#elif defined(__linux__) && defined(__arm__)
#define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__)
#define PLAT_mips32_linux 1
146#if !defined(NVALGRIND)
170#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
173 (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default), (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
174 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); \
177#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
179 (void)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, (_zzq_request), (_zzq_arg1), (_zzq_arg2), (_zzq_arg3), (_zzq_arg4), \
183#if defined(NVALGRIND)
188#define VALGRIND_DO_CLIENT_REQUEST_EXPR(_zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
231#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) || (defined(PLAT_x86_win32) && defined(__GNUC__))
/* x86 "magic" preamble: four rolls of %edi totalling 3+13+29+19 = 64
   bit positions (== 0 mod 32), so %edi is left unchanged and the
   sequence is a no-op on real hardware.  Valgrind's translator
   recognises it and treats the marker instruction that follows as a
   client request. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
    "roll $3, %%edi ; roll $13, %%edi\n\t" \
    "roll $29, %%edi ; roll $19, %%edi\n\t"
241#define VALGRIND_DO_CLIENT_REQUEST_EXPR(_zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
244 volatile unsigned int _zzq_args[6]; \
245 volatile unsigned int _zzq_result; \
246 _zzq_args[0] = (unsigned int)(_zzq_request); \
247 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
248 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
249 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
250 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
251 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
252 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
253 "xchgl %%ebx,%%ebx" \
254 : "=d"(_zzq_result) \
255 : "a"(&_zzq_args[0]), "0"(_zzq_default) \
260#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
262 volatile OrigFn *_zzq_orig = &(_zzq_rlval); \
263 volatile unsigned int __addr; \
264 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
265 "xchgl %%ecx,%%ecx" \
269 _zzq_orig->nraddr = __addr; \
272#define VALGRIND_CALL_NOREDIR_EAX \
273 __SPECIAL_INSTRUCTION_PREAMBLE \
275 "xchgl %%edx,%%edx\n\t"
277#define VALGRIND_VEX_INJECT_IR() \
279 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE "xchgl %%edi,%%edi\n\t" : : : "cc", "memory"); \
#if defined(PLAT_x86_win32) && !defined(__GNUC__)
/* MSVC variant of the magic preamble: the same net-no-op rotation of
   edi (3+13+29+19 = 64 positions) written as MSVC inline assembly. */
#define __SPECIAL_INSTRUCTION_PREAMBLE __asm rol edi, 3 __asm rol edi, 13 __asm rol edi, 29 __asm rol edi, 19
296#define VALGRIND_DO_CLIENT_REQUEST_EXPR(_zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
298 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
299 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), (uintptr_t)(_zzq_arg4), \
300 (uintptr_t)(_zzq_arg5))
302static __inline uintptr_t valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
303 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2, uintptr_t _zzq_arg3,
304 uintptr_t _zzq_arg4, uintptr_t _zzq_arg5)
306 volatile uintptr_t _zzq_args[6];
307 volatile unsigned int _zzq_result;
308 _zzq_args[0] = (uintptr_t)(_zzq_request);
309 _zzq_args[1] = (uintptr_t)(_zzq_arg1);
310 _zzq_args[2] = (uintptr_t)(_zzq_arg2);
311 _zzq_args[3] = (uintptr_t)(_zzq_arg3);
312 _zzq_args[4] = (uintptr_t)(_zzq_arg4);
313 _zzq_args[5] = (uintptr_t)(_zzq_arg5);
314 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
315 __SPECIAL_INSTRUCTION_PREAMBLE
318 __asm mov _zzq_result, edx
323#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
325 volatile OrigFn *_zzq_orig = &(_zzq_rlval); \
326 volatile unsigned int __addr; \
327 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
329 __asm mov __addr, eax} \
330 _zzq_orig->nraddr = __addr; \
#define VALGRIND_CALL_NOREDIR_EAX ERROR /* not implemented under MSVC; expands to ERROR so any use is rejected at compile time */
335#define VALGRIND_VEX_INJECT_IR() \
337 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
338 __asm xchg edi,edi} \
342#error Unsupported compiler.
349#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
352 unsigned long long int nraddr;
/* amd64 "magic" preamble: rolq amounts 3+13+61+51 sum to 128
   (== 0 mod 64), leaving %rdi unchanged -- a no-op natively, but
   recognised by Valgrind as the client-request marker prefix. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
    "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
    "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
359#define VALGRIND_DO_CLIENT_REQUEST_EXPR(_zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
362 volatile unsigned long long int _zzq_args[6]; \
363 volatile unsigned long long int _zzq_result; \
364 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
365 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
366 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
367 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
368 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
369 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
370 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
371 "xchgq %%rbx,%%rbx" \
372 : "=d"(_zzq_result) \
373 : "a"(&_zzq_args[0]), "0"(_zzq_default) \
378#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
380 volatile OrigFn *_zzq_orig = &(_zzq_rlval); \
381 volatile unsigned long long int __addr; \
382 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
383 "xchgq %%rcx,%%rcx" \
387 _zzq_orig->nraddr = __addr; \
390#define VALGRIND_CALL_NOREDIR_RAX \
391 __SPECIAL_INSTRUCTION_PREAMBLE \
393 "xchgq %%rdx,%%rdx\n\t"
395#define VALGRIND_VEX_INJECT_IR() \
397 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE "xchgq %%rdi,%%rdi\n\t" : : : "cc", "memory"); \
404#if defined(PLAT_ppc32_linux)
/* ppc32 "magic" preamble: a distinctive sequence of rlwinm
   rotate-and-mask instructions on r0, recognised by Valgrind's
   translator as the client-request marker prefix. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
    "rlwinm 0,0,3,0,0 ; rlwinm 0,0,13,0,0\n\t" \
    "rlwinm 0,0,29,0,0 ; rlwinm 0,0,19,0,0\n\t"
414#define VALGRIND_DO_CLIENT_REQUEST_EXPR(_zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
418 unsigned int _zzq_args[6]; \
419 unsigned int _zzq_result; \
420 unsigned int *_zzq_ptr; \
421 _zzq_args[0] = (unsigned int)(_zzq_request); \
422 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
423 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
424 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
425 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
426 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
427 _zzq_ptr = _zzq_args; \
428 __asm__ volatile("mr 3,%1\n\t" \
430 __SPECIAL_INSTRUCTION_PREAMBLE \
433 : "=b"(_zzq_result) \
434 : "b"(_zzq_default), "b"(_zzq_ptr) \
435 : "cc", "memory", "r3", "r4"); \
439#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
441 volatile OrigFn *_zzq_orig = &(_zzq_rlval); \
442 unsigned int __addr; \
443 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
448 : "cc", "memory", "r3"); \
449 _zzq_orig->nraddr = __addr; \
452#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
453 __SPECIAL_INSTRUCTION_PREAMBLE \
457#define VALGRIND_VEX_INJECT_IR() \
459 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE "or 5,5,5\n\t"); \
466#if defined(PLAT_ppc64_linux)
469 unsigned long long int nraddr;
470 unsigned long long int r2;
/* ppc64 "magic" preamble: rotldi amounts 3+13+61+51 sum to 128
   (== 0 mod 64), so r0 is left unchanged; Valgrind recognises the
   sequence as the client-request marker prefix. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
    "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
    "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
477#define VALGRIND_DO_CLIENT_REQUEST_EXPR(_zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
481 unsigned long long int _zzq_args[6]; \
482 unsigned long long int _zzq_result; \
483 unsigned long long int *_zzq_ptr; \
484 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
485 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
486 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
487 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
488 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
489 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
490 _zzq_ptr = _zzq_args; \
491 __asm__ volatile("mr 3,%1\n\t" \
493 __SPECIAL_INSTRUCTION_PREAMBLE \
496 : "=b"(_zzq_result) \
497 : "b"(_zzq_default), "b"(_zzq_ptr) \
498 : "cc", "memory", "r3", "r4"); \
502#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
504 volatile OrigFn *_zzq_orig = &(_zzq_rlval); \
505 unsigned long long int __addr; \
506 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
511 : "cc", "memory", "r3"); \
512 _zzq_orig->nraddr = __addr; \
513 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE
\
518 : "cc", "memory", "r3"); \
519 _zzq_orig->r2 = __addr; \
522#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
523 __SPECIAL_INSTRUCTION_PREAMBLE \
527#define VALGRIND_VEX_INJECT_IR() \
529 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE "or 5,5,5\n\t"); \
536#if defined(PLAT_arm_linux)
/* arm "magic" preamble: rotate r12 by 3+13+29+19 = 64 positions
   (== 0 mod 32), leaving r12 unchanged; recognised by Valgrind as
   the client-request marker prefix. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
    "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
    "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
546#define VALGRIND_DO_CLIENT_REQUEST_EXPR(_zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
550 volatile unsigned int _zzq_args[6]; \
551 volatile unsigned int _zzq_result; \
552 _zzq_args[0] = (unsigned int)(_zzq_request); \
553 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
554 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
555 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
556 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
557 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
558 __asm__ volatile("mov r3, %1\n\t" \
560 __SPECIAL_INSTRUCTION_PREAMBLE \
561 "orr r10, r10, r10\n\t" \
563 : "=r"(_zzq_result) \
564 : "r"(_zzq_default), "r"(&_zzq_args[0]) \
565 : "cc", "memory", "r3", "r4"); \
569#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
571 volatile OrigFn *_zzq_orig = &(_zzq_rlval); \
572 unsigned int __addr; \
573 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
574 "orr r11, r11, r11\n\t" \
578 : "cc", "memory", "r3"); \
579 _zzq_orig->nraddr = __addr; \
582#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
583 __SPECIAL_INSTRUCTION_PREAMBLE \
585 "orr r12, r12, r12\n\t"
587#define VALGRIND_VEX_INJECT_IR() \
589 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE "orr r9, r9, r9\n\t" : : : "cc", "memory"); \
596#if defined(PLAT_s390x_linux)
599 unsigned long long int nraddr;
606#define __SPECIAL_INSTRUCTION_PREAMBLE \
#define __CLIENT_REQUEST_CODE "lr 2,2\n\t" /* register-to-self load (no-op) marking a client request */
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t" /* marker: get-NR-context request */
#define __CALL_NO_REDIR_CODE "lr 4,4\n\t" /* marker: call without redirection */
#define __VEX_INJECT_IR_CODE "lr 5,5\n\t" /* marker: inject-IR request */
617#define VALGRIND_DO_CLIENT_REQUEST_EXPR(_zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
620 volatile unsigned long long int _zzq_args[6]; \
621 volatile unsigned long long int _zzq_result; \
622 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
623 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
624 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
625 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
626 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
627 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
630 "lgr 3,%2\n\t" __SPECIAL_INSTRUCTION_PREAMBLE __CLIENT_REQUEST_CODE
\
632 : "=d"(_zzq_result) \
633 : "a"(&_zzq_args[0]), "0"(_zzq_default) \
634 : "cc", "2", "3", "memory"); \
638#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
640 volatile OrigFn *_zzq_orig = &(_zzq_rlval); \
641 volatile unsigned long long int __addr; \
642 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE __GET_NR_CONTEXT_CODE "lgr %0, 3\n\t" \
645 : "cc", "3", "memory"); \
646 _zzq_orig->nraddr = __addr; \
649#define VALGRIND_CALL_NOREDIR_R1 \
650 __SPECIAL_INSTRUCTION_PREAMBLE \
653#define VALGRIND_VEX_INJECT_IR() \
655 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE __VEX_INJECT_IR_CODE); \
662#if defined(PLAT_mips32_linux)
672#define __SPECIAL_INSTRUCTION_PREAMBLE \
673 "srl $0, $0, 13\n\t" \
674 "srl $0, $0, 29\n\t" \
675 "srl $0, $0, 3\n\t" \
678#define VALGRIND_DO_CLIENT_REQUEST_EXPR(_zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, \
681 volatile unsigned int _zzq_args[6]; \
682 volatile unsigned int _zzq_result; \
683 _zzq_args[0] = (unsigned int)(_zzq_request); \
684 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
685 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
686 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
687 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
688 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
689 __asm__ volatile("move $11, %1\n\t" \
691 __SPECIAL_INSTRUCTION_PREAMBLE \
692 "or $13, $13, $13\n\t" \
694 : "=r"(_zzq_result) \
695 : "r"(_zzq_default), "r"(&_zzq_args[0]) \
696 : "cc", "memory", "t3", "t4"); \
700#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
702 volatile OrigFn *_zzq_orig = &(_zzq_rlval); \
703 volatile unsigned int __addr; \
704 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
705 "or $14, $14, $14\n\t" \
709 : "cc", "memory", "t3"); \
710 _zzq_orig->nraddr = __addr; \
713#define VALGRIND_CALL_NOREDIR_T9 \
714 __SPECIAL_INSTRUCTION_PREAMBLE \
716 "or $15, $15, $15\n\t"
718#define VALGRIND_VEX_INJECT_IR() \
720 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE "or $11, $11, $11\n\t"); \
#define VG_CONCAT4(_aa, _bb, _cc, _dd) _aa##_bb##_cc##_dd
/* Build the encoded symbol name under which Valgrind's core looks up
   a *wrapper* for function `fnname` in shared object `soname`; the
   ZU/ZZ suffix records how the two name components are encoded. */
#define I_WRAP_SONAME_FNNAME_ZU(soname, fnname) VG_CONCAT4(_vgw00000ZU_, soname, _, fnname)
#define I_WRAP_SONAME_FNNAME_ZZ(soname, fnname) VG_CONCAT4(_vgw00000ZZ_, soname, _, fnname)
/* Inside a wrapper, fetch the OrigFn describing the function being
   wrapped, so it can be invoked via the CALL_FN_* macros. */
#define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
/* Same naming scheme, but for *replacement* functions (_vgr prefix). */
#define I_REPLACE_SONAME_FNNAME_ZU(soname, fnname) VG_CONCAT4(_vgr00000ZU_, soname, _, fnname)
#define I_REPLACE_SONAME_FNNAME_ZZ(soname, fnname) VG_CONCAT4(_vgr00000ZZ_, soname, _, fnname)
787#define CALL_FN_v_v(fnptr) \
789 volatile unsigned long _junk; \
790 CALL_FN_W_v(_junk, fnptr); \
793#define CALL_FN_v_W(fnptr, arg1) \
795 volatile unsigned long _junk; \
796 CALL_FN_W_W(_junk, fnptr, arg1); \
799#define CALL_FN_v_WW(fnptr, arg1, arg2) \
801 volatile unsigned long _junk; \
802 CALL_FN_W_WW(_junk, fnptr, arg1, arg2); \
805#define CALL_FN_v_WWW(fnptr, arg1, arg2, arg3) \
807 volatile unsigned long _junk; \
808 CALL_FN_W_WWW(_junk, fnptr, arg1, arg2, arg3); \
811#define CALL_FN_v_WWWW(fnptr, arg1, arg2, arg3, arg4) \
813 volatile unsigned long _junk; \
814 CALL_FN_W_WWWW(_junk, fnptr, arg1, arg2, arg3, arg4); \
817#define CALL_FN_v_5W(fnptr, arg1, arg2, arg3, arg4, arg5) \
819 volatile unsigned long _junk; \
820 CALL_FN_W_5W(_junk, fnptr, arg1, arg2, arg3, arg4, arg5); \
823#define CALL_FN_v_6W(fnptr, arg1, arg2, arg3, arg4, arg5, arg6) \
825 volatile unsigned long _junk; \
826 CALL_FN_W_6W(_junk, fnptr, arg1, arg2, arg3, arg4, arg5, arg6); \
829#define CALL_FN_v_7W(fnptr, arg1, arg2, arg3, arg4, arg5, arg6, arg7) \
831 volatile unsigned long _junk; \
832 CALL_FN_W_7W(_junk, fnptr, arg1, arg2, arg3, arg4, arg5, arg6, arg7); \
837#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
#define __CALLER_SAVED_REGS "ecx", "edx" /* x86 registers a callee may trash; named in the CALL_FN_* clobber lists */
/* Align %esp down to a 16-byte boundary before pushing call
   arguments; the old %esp is kept in %edi so VALGRIND_RESTORE_STACK
   can undo it (which is why "edi" also appears in the clobber lists). */
#define VALGRIND_ALIGN_STACK \
    "movl %%esp,%%edi\n\t" \
    "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK "movl %%edi,%%esp\n\t"
856#define CALL_FN_W_v(lval, orig) \
858 volatile OrigFn _orig = (orig); \
859 volatile unsigned long _argvec[1]; \
860 volatile unsigned long _res; \
861 _argvec[0] = (unsigned long)_orig.nraddr; \
862 __asm__ volatile(VALGRIND_ALIGN_STACK "movl (%%eax), %%eax\n\t" \
863 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
866 :
"cc", "memory", __CALLER_SAVED_REGS, "edi"); \
867 lval = (__typeof__(lval))_res; \
870#define CALL_FN_W_W(lval, orig, arg1) \
872 volatile OrigFn _orig = (orig); \
873 volatile unsigned long _argvec[2]; \
874 volatile unsigned long _res; \
875 _argvec[0] = (unsigned long)_orig.nraddr; \
876 _argvec[1] = (unsigned long)(arg1); \
877 __asm__ volatile(VALGRIND_ALIGN_STACK "subl $12, %%esp\n\t" \
878 "pushl 4(%%eax)\n\t" \
879 "movl (%%eax), %%eax\n\t"
\
880 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
883 :
"cc", "memory", __CALLER_SAVED_REGS, "edi"); \
884 lval = (__typeof__(lval))_res; \
887#define CALL_FN_W_WW(lval, orig, arg1, arg2) \
889 volatile OrigFn _orig = (orig); \
890 volatile unsigned long _argvec[3]; \
891 volatile unsigned long _res; \
892 _argvec[0] = (unsigned long)_orig.nraddr; \
893 _argvec[1] = (unsigned long)(arg1); \
894 _argvec[2] = (unsigned long)(arg2); \
895 __asm__ volatile(VALGRIND_ALIGN_STACK "subl $8, %%esp\n\t" \
896 "pushl 8(%%eax)\n\t" \
897 "pushl 4(%%eax)\n\t" \
898 "movl (%%eax), %%eax\n\t"
\
899 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
902 :
"cc", "memory", __CALLER_SAVED_REGS, "edi"); \
903 lval = (__typeof__(lval))_res; \
906#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
908 volatile OrigFn _orig = (orig); \
909 volatile unsigned long _argvec[4]; \
910 volatile unsigned long _res; \
911 _argvec[0] = (unsigned long)_orig.nraddr; \
912 _argvec[1] = (unsigned long)(arg1); \
913 _argvec[2] = (unsigned long)(arg2); \
914 _argvec[3] = (unsigned long)(arg3); \
915 __asm__ volatile(VALGRIND_ALIGN_STACK "subl $4, %%esp\n\t" \
916 "pushl 12(%%eax)\n\t" \
917 "pushl 8(%%eax)\n\t" \
918 "pushl 4(%%eax)\n\t" \
919 "movl (%%eax), %%eax\n\t"
\
920 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
923 :
"cc", "memory", __CALLER_SAVED_REGS, "edi"); \
924 lval = (__typeof__(lval))_res; \
927#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
929 volatile OrigFn _orig = (orig); \
930 volatile unsigned long _argvec[5]; \
931 volatile unsigned long _res; \
932 _argvec[0] = (unsigned long)_orig.nraddr; \
933 _argvec[1] = (unsigned long)(arg1); \
934 _argvec[2] = (unsigned long)(arg2); \
935 _argvec[3] = (unsigned long)(arg3); \
936 _argvec[4] = (unsigned long)(arg4); \
937 __asm__ volatile(VALGRIND_ALIGN_STACK "pushl 16(%%eax)\n\t" \
938 "pushl 12(%%eax)\n\t" \
939 "pushl 8(%%eax)\n\t" \
940 "pushl 4(%%eax)\n\t" \
941 "movl (%%eax), %%eax\n\t"
\
942 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
945 :
"cc", "memory", __CALLER_SAVED_REGS, "edi"); \
946 lval = (__typeof__(lval))_res; \
949#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
951 volatile OrigFn _orig = (orig); \
952 volatile unsigned long _argvec[6]; \
953 volatile unsigned long _res; \
954 _argvec[0] = (unsigned long)_orig.nraddr; \
955 _argvec[1] = (unsigned long)(arg1); \
956 _argvec[2] = (unsigned long)(arg2); \
957 _argvec[3] = (unsigned long)(arg3); \
958 _argvec[4] = (unsigned long)(arg4); \
959 _argvec[5] = (unsigned long)(arg5); \
960 __asm__ volatile(VALGRIND_ALIGN_STACK "subl $12, %%esp\n\t" \
961 "pushl 20(%%eax)\n\t" \
962 "pushl 16(%%eax)\n\t" \
963 "pushl 12(%%eax)\n\t" \
964 "pushl 8(%%eax)\n\t" \
965 "pushl 4(%%eax)\n\t" \
966 "movl (%%eax), %%eax\n\t"
\
967 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
970 :
"cc", "memory", __CALLER_SAVED_REGS, "edi"); \
971 lval = (__typeof__(lval))_res; \
974#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6) \
976 volatile OrigFn _orig = (orig); \
977 volatile unsigned long _argvec[7]; \
978 volatile unsigned long _res; \
979 _argvec[0] = (unsigned long)_orig.nraddr; \
980 _argvec[1] = (unsigned long)(arg1); \
981 _argvec[2] = (unsigned long)(arg2); \
982 _argvec[3] = (unsigned long)(arg3); \
983 _argvec[4] = (unsigned long)(arg4); \
984 _argvec[5] = (unsigned long)(arg5); \
985 _argvec[6] = (unsigned long)(arg6); \
986 __asm__ volatile(VALGRIND_ALIGN_STACK "subl $8, %%esp\n\t" \
987 "pushl 24(%%eax)\n\t" \
988 "pushl 20(%%eax)\n\t" \
989 "pushl 16(%%eax)\n\t" \
990 "pushl 12(%%eax)\n\t" \
991 "pushl 8(%%eax)\n\t" \
992 "pushl 4(%%eax)\n\t" \
993 "movl (%%eax), %%eax\n\t"
\
994 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
997 :
"cc", "memory", __CALLER_SAVED_REGS, "edi"); \
998 lval = (__typeof__(lval))_res; \
1001#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7) \
1003 volatile OrigFn _orig = (orig); \
1004 volatile unsigned long _argvec[8]; \
1005 volatile unsigned long _res; \
1006 _argvec[0] = (unsigned long)_orig.nraddr; \
1007 _argvec[1] = (unsigned long)(arg1); \
1008 _argvec[2] = (unsigned long)(arg2); \
1009 _argvec[3] = (unsigned long)(arg3); \
1010 _argvec[4] = (unsigned long)(arg4); \
1011 _argvec[5] = (unsigned long)(arg5); \
1012 _argvec[6] = (unsigned long)(arg6); \
1013 _argvec[7] = (unsigned long)(arg7); \
1014 __asm__ volatile(VALGRIND_ALIGN_STACK "subl $4, %%esp\n\t" \
1015 "pushl 28(%%eax)\n\t" \
1016 "pushl 24(%%eax)\n\t" \
1017 "pushl 20(%%eax)\n\t" \
1018 "pushl 16(%%eax)\n\t" \
1019 "pushl 12(%%eax)\n\t" \
1020 "pushl 8(%%eax)\n\t" \
1021 "pushl 4(%%eax)\n\t" \
1022 "movl (%%eax), %%eax\n\t"
\
1023 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1025 :
"a"(&_argvec[0]) \
1026 :
"cc", "memory", __CALLER_SAVED_REGS, "edi"); \
1027 lval = (__typeof__(lval))_res; \
1030#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8) \
1032 volatile OrigFn _orig = (orig); \
1033 volatile unsigned long _argvec[9]; \
1034 volatile unsigned long _res; \
1035 _argvec[0] = (unsigned long)_orig.nraddr; \
1036 _argvec[1] = (unsigned long)(arg1); \
1037 _argvec[2] = (unsigned long)(arg2); \
1038 _argvec[3] = (unsigned long)(arg3); \
1039 _argvec[4] = (unsigned long)(arg4); \
1040 _argvec[5] = (unsigned long)(arg5); \
1041 _argvec[6] = (unsigned long)(arg6); \
1042 _argvec[7] = (unsigned long)(arg7); \
1043 _argvec[8] = (unsigned long)(arg8); \
1044 __asm__ volatile(VALGRIND_ALIGN_STACK "pushl 32(%%eax)\n\t" \
1045 "pushl 28(%%eax)\n\t" \
1046 "pushl 24(%%eax)\n\t" \
1047 "pushl 20(%%eax)\n\t" \
1048 "pushl 16(%%eax)\n\t" \
1049 "pushl 12(%%eax)\n\t" \
1050 "pushl 8(%%eax)\n\t" \
1051 "pushl 4(%%eax)\n\t" \
1052 "movl (%%eax), %%eax\n\t"
\
1053 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1055 :
"a"(&_argvec[0]) \
1056 :
"cc", "memory", __CALLER_SAVED_REGS, "edi"); \
1057 lval = (__typeof__(lval))_res; \
1060#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9) \
1062 volatile OrigFn _orig = (orig); \
1063 volatile unsigned long _argvec[10]; \
1064 volatile unsigned long _res; \
1065 _argvec[0] = (unsigned long)_orig.nraddr; \
1066 _argvec[1] = (unsigned long)(arg1); \
1067 _argvec[2] = (unsigned long)(arg2); \
1068 _argvec[3] = (unsigned long)(arg3); \
1069 _argvec[4] = (unsigned long)(arg4); \
1070 _argvec[5] = (unsigned long)(arg5); \
1071 _argvec[6] = (unsigned long)(arg6); \
1072 _argvec[7] = (unsigned long)(arg7); \
1073 _argvec[8] = (unsigned long)(arg8); \
1074 _argvec[9] = (unsigned long)(arg9); \
1075 __asm__ volatile(VALGRIND_ALIGN_STACK "subl $12, %%esp\n\t" \
1076 "pushl 36(%%eax)\n\t" \
1077 "pushl 32(%%eax)\n\t" \
1078 "pushl 28(%%eax)\n\t" \
1079 "pushl 24(%%eax)\n\t" \
1080 "pushl 20(%%eax)\n\t" \
1081 "pushl 16(%%eax)\n\t" \
1082 "pushl 12(%%eax)\n\t" \
1083 "pushl 8(%%eax)\n\t" \
1084 "pushl 4(%%eax)\n\t" \
1085 "movl (%%eax), %%eax\n\t"
\
1086 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1088 :
"a"(&_argvec[0]) \
1089 :
"cc", "memory", __CALLER_SAVED_REGS, "edi"); \
1090 lval = (__typeof__(lval))_res; \
1093#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10) \
1095 volatile OrigFn _orig = (orig); \
1096 volatile unsigned long _argvec[11]; \
1097 volatile unsigned long _res; \
1098 _argvec[0] = (unsigned long)_orig.nraddr; \
1099 _argvec[1] = (unsigned long)(arg1); \
1100 _argvec[2] = (unsigned long)(arg2); \
1101 _argvec[3] = (unsigned long)(arg3); \
1102 _argvec[4] = (unsigned long)(arg4); \
1103 _argvec[5] = (unsigned long)(arg5); \
1104 _argvec[6] = (unsigned long)(arg6); \
1105 _argvec[7] = (unsigned long)(arg7); \
1106 _argvec[8] = (unsigned long)(arg8); \
1107 _argvec[9] = (unsigned long)(arg9); \
1108 _argvec[10] = (unsigned long)(arg10); \
1109 __asm__ volatile(VALGRIND_ALIGN_STACK "subl $8, %%esp\n\t" \
1110 "pushl 40(%%eax)\n\t" \
1111 "pushl 36(%%eax)\n\t" \
1112 "pushl 32(%%eax)\n\t" \
1113 "pushl 28(%%eax)\n\t" \
1114 "pushl 24(%%eax)\n\t" \
1115 "pushl 20(%%eax)\n\t" \
1116 "pushl 16(%%eax)\n\t" \
1117 "pushl 12(%%eax)\n\t" \
1118 "pushl 8(%%eax)\n\t" \
1119 "pushl 4(%%eax)\n\t" \
1120 "movl (%%eax), %%eax\n\t"
\
1121 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1123 :
"a"(&_argvec[0]) \
1124 :
"cc", "memory", __CALLER_SAVED_REGS, "edi"); \
1125 lval = (__typeof__(lval))_res; \
1128#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11) \
1130 volatile OrigFn _orig = (orig); \
1131 volatile unsigned long _argvec[12]; \
1132 volatile unsigned long _res; \
1133 _argvec[0] = (unsigned long)_orig.nraddr; \
1134 _argvec[1] = (unsigned long)(arg1); \
1135 _argvec[2] = (unsigned long)(arg2); \
1136 _argvec[3] = (unsigned long)(arg3); \
1137 _argvec[4] = (unsigned long)(arg4); \
1138 _argvec[5] = (unsigned long)(arg5); \
1139 _argvec[6] = (unsigned long)(arg6); \
1140 _argvec[7] = (unsigned long)(arg7); \
1141 _argvec[8] = (unsigned long)(arg8); \
1142 _argvec[9] = (unsigned long)(arg9); \
1143 _argvec[10] = (unsigned long)(arg10); \
1144 _argvec[11] = (unsigned long)(arg11); \
1145 __asm__ volatile(VALGRIND_ALIGN_STACK "subl $4, %%esp\n\t" \
1146 "pushl 44(%%eax)\n\t" \
1147 "pushl 40(%%eax)\n\t" \
1148 "pushl 36(%%eax)\n\t" \
1149 "pushl 32(%%eax)\n\t" \
1150 "pushl 28(%%eax)\n\t" \
1151 "pushl 24(%%eax)\n\t" \
1152 "pushl 20(%%eax)\n\t" \
1153 "pushl 16(%%eax)\n\t" \
1154 "pushl 12(%%eax)\n\t" \
1155 "pushl 8(%%eax)\n\t" \
1156 "pushl 4(%%eax)\n\t" \
1157 "movl (%%eax), %%eax\n\t"
\
1158 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1160 :
"a"(&_argvec[0]) \
1161 :
"cc", "memory", __CALLER_SAVED_REGS, "edi"); \
1162 lval = (__typeof__(lval))_res; \
1165#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12) \
1167 volatile OrigFn _orig = (orig); \
1168 volatile unsigned long _argvec[13]; \
1169 volatile unsigned long _res; \
1170 _argvec[0] = (unsigned long)_orig.nraddr; \
1171 _argvec[1] = (unsigned long)(arg1); \
1172 _argvec[2] = (unsigned long)(arg2); \
1173 _argvec[3] = (unsigned long)(arg3); \
1174 _argvec[4] = (unsigned long)(arg4); \
1175 _argvec[5] = (unsigned long)(arg5); \
1176 _argvec[6] = (unsigned long)(arg6); \
1177 _argvec[7] = (unsigned long)(arg7); \
1178 _argvec[8] = (unsigned long)(arg8); \
1179 _argvec[9] = (unsigned long)(arg9); \
1180 _argvec[10] = (unsigned long)(arg10); \
1181 _argvec[11] = (unsigned long)(arg11); \
1182 _argvec[12] = (unsigned long)(arg12); \
1183 __asm__ volatile(VALGRIND_ALIGN_STACK "pushl 48(%%eax)\n\t" \
1184 "pushl 44(%%eax)\n\t" \
1185 "pushl 40(%%eax)\n\t" \
1186 "pushl 36(%%eax)\n\t" \
1187 "pushl 32(%%eax)\n\t" \
1188 "pushl 28(%%eax)\n\t" \
1189 "pushl 24(%%eax)\n\t" \
1190 "pushl 20(%%eax)\n\t" \
1191 "pushl 16(%%eax)\n\t" \
1192 "pushl 12(%%eax)\n\t" \
1193 "pushl 8(%%eax)\n\t" \
1194 "pushl 4(%%eax)\n\t" \
1195 "movl (%%eax), %%eax\n\t"
\
1196 VALGRIND_CALL_NOREDIR_EAX VALGRIND_RESTORE_STACK \
1198 :
"a"(&_argvec[0]) \
1199 :
"cc", "memory", __CALLER_SAVED_REGS, "edi"); \
1200 lval = (__typeof__(lval))_res; \
#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
/* amd64 caller-saved registers, named in every CALL_FN_* clobber
   list below. */
#define __CALLER_SAVED_REGS "rcx", "rdx", "rsi", "rdi", "r8", "r9", "r10", "r11"
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
/* With DWARF CFI asm support, describe the temporary frame used
   around the no-redirect call so stack unwinding still works:
   %rbp is stashed in %r15 and pointed at the caller's CFA (passed in
   as asm operand %2 via __FRAME_POINTER), then restored afterwards.
   ("r15" is listed as clobbered by the CALL_FN_* blocks.) */
#define __FRAME_POINTER , "r"(__builtin_dwarf_cfa())
#define VALGRIND_CFI_PROLOGUE \
    "movq %%rbp, %%r15\n\t" \
    "movq %2, %%rbp\n\t" \
    ".cfi_remember_state\n\t" \
    ".cfi_def_cfa rbp, 0\n\t"
#define VALGRIND_CFI_EPILOGUE \
    "movq %%r15, %%rbp\n\t" \
    ".cfi_restore_state\n\t"
/* Fallback when CFI asm is unavailable: all three expand to nothing. */
#define __FRAME_POINTER
#define VALGRIND_CFI_PROLOGUE
#define VALGRIND_CFI_EPILOGUE
/* Align %rsp down to 16 bytes, preserving the old value in %r14 for
   VALGRIND_RESTORE_STACK ("r14" appears in the clobber lists). */
#define VALGRIND_ALIGN_STACK \
    "movq %%rsp,%%r14\n\t" \
    "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK "movq %%r14,%%rsp\n\t"
1318#define CALL_FN_W_v(lval, orig) \
1320 volatile OrigFn _orig = (orig); \
1321 volatile unsigned long _argvec[1]; \
1322 volatile unsigned long _res; \
1323 _argvec[0] = (unsigned long)_orig.nraddr; \
1324 __asm__ volatile(VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1325 "movq (%%rax), %%rax\n\t"
\
1326 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1328 :
"a"(&_argvec[0])__FRAME_POINTER \
1329 :
"cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"); \
1330 lval = (__typeof__(lval))_res; \
1333#define CALL_FN_W_W(lval, orig, arg1) \
1335 volatile OrigFn _orig = (orig); \
1336 volatile unsigned long _argvec[2]; \
1337 volatile unsigned long _res; \
1338 _argvec[0] = (unsigned long)_orig.nraddr; \
1339 _argvec[1] = (unsigned long)(arg1); \
1340 __asm__ volatile(VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1341 "movq 8(%%rax), %%rdi\n\t" \
1342 "movq (%%rax), %%rax\n\t"
\
1343 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1345 :
"a"(&_argvec[0])__FRAME_POINTER \
1346 :
"cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"); \
1347 lval = (__typeof__(lval))_res; \
1350#define CALL_FN_W_WW(lval, orig, arg1, arg2) \
1352 volatile OrigFn _orig = (orig); \
1353 volatile unsigned long _argvec[3]; \
1354 volatile unsigned long _res; \
1355 _argvec[0] = (unsigned long)_orig.nraddr; \
1356 _argvec[1] = (unsigned long)(arg1); \
1357 _argvec[2] = (unsigned long)(arg2); \
1358 __asm__ volatile(VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1359 "movq 16(%%rax), %%rsi\n\t" \
1360 "movq 8(%%rax), %%rdi\n\t" \
1361 "movq (%%rax), %%rax\n\t"
\
1362 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1364 :
"a"(&_argvec[0])__FRAME_POINTER \
1365 :
"cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"); \
1366 lval = (__typeof__(lval))_res; \
1369#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
1371 volatile OrigFn _orig = (orig); \
1372 volatile unsigned long _argvec[4]; \
1373 volatile unsigned long _res; \
1374 _argvec[0] = (unsigned long)_orig.nraddr; \
1375 _argvec[1] = (unsigned long)(arg1); \
1376 _argvec[2] = (unsigned long)(arg2); \
1377 _argvec[3] = (unsigned long)(arg3); \
1378 __asm__ volatile(VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1379 "movq 24(%%rax), %%rdx\n\t" \
1380 "movq 16(%%rax), %%rsi\n\t" \
1381 "movq 8(%%rax), %%rdi\n\t" \
1382 "movq (%%rax), %%rax\n\t"
\
1383 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1385 :
"a"(&_argvec[0])__FRAME_POINTER \
1386 :
"cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"); \
1387 lval = (__typeof__(lval))_res; \
1390#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
1392 volatile OrigFn _orig = (orig); \
1393 volatile unsigned long _argvec[5]; \
1394 volatile unsigned long _res; \
1395 _argvec[0] = (unsigned long)_orig.nraddr; \
1396 _argvec[1] = (unsigned long)(arg1); \
1397 _argvec[2] = (unsigned long)(arg2); \
1398 _argvec[3] = (unsigned long)(arg3); \
1399 _argvec[4] = (unsigned long)(arg4); \
1400 __asm__ volatile(VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1401 "movq 32(%%rax), %%rcx\n\t" \
1402 "movq 24(%%rax), %%rdx\n\t" \
1403 "movq 16(%%rax), %%rsi\n\t" \
1404 "movq 8(%%rax), %%rdi\n\t" \
1405 "movq (%%rax), %%rax\n\t"
\
1406 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1408 :
"a"(&_argvec[0])__FRAME_POINTER \
1409 :
"cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"); \
1410 lval = (__typeof__(lval))_res; \
1413#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
1415 volatile OrigFn _orig = (orig); \
1416 volatile unsigned long _argvec[6]; \
1417 volatile unsigned long _res; \
1418 _argvec[0] = (unsigned long)_orig.nraddr; \
1419 _argvec[1] = (unsigned long)(arg1); \
1420 _argvec[2] = (unsigned long)(arg2); \
1421 _argvec[3] = (unsigned long)(arg3); \
1422 _argvec[4] = (unsigned long)(arg4); \
1423 _argvec[5] = (unsigned long)(arg5); \
1424 __asm__ volatile(VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1425 "movq 40(%%rax), %%r8\n\t" \
1426 "movq 32(%%rax), %%rcx\n\t" \
1427 "movq 24(%%rax), %%rdx\n\t" \
1428 "movq 16(%%rax), %%rsi\n\t" \
1429 "movq 8(%%rax), %%rdi\n\t" \
1430 "movq (%%rax), %%rax\n\t"
\
1431 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1433 :
"a"(&_argvec[0])__FRAME_POINTER \
1434 :
"cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"); \
1435 lval = (__typeof__(lval))_res; \
1438#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6) \
1440 volatile OrigFn _orig = (orig); \
1441 volatile unsigned long _argvec[7]; \
1442 volatile unsigned long _res; \
1443 _argvec[0] = (unsigned long)_orig.nraddr; \
1444 _argvec[1] = (unsigned long)(arg1); \
1445 _argvec[2] = (unsigned long)(arg2); \
1446 _argvec[3] = (unsigned long)(arg3); \
1447 _argvec[4] = (unsigned long)(arg4); \
1448 _argvec[5] = (unsigned long)(arg5); \
1449 _argvec[6] = (unsigned long)(arg6); \
1450 __asm__ volatile(VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1451 "movq 48(%%rax), %%r9\n\t" \
1452 "movq 40(%%rax), %%r8\n\t" \
1453 "movq 32(%%rax), %%rcx\n\t" \
1454 "movq 24(%%rax), %%rdx\n\t" \
1455 "movq 16(%%rax), %%rsi\n\t" \
1456 "movq 8(%%rax), %%rdi\n\t" \
1457 "movq (%%rax), %%rax\n\t"
\
1458 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1460 :
"a"(&_argvec[0])__FRAME_POINTER \
1461 :
"cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"); \
1462 lval = (__typeof__(lval))_res; \
1465#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7) \
1467 volatile OrigFn _orig = (orig); \
1468 volatile unsigned long _argvec[8]; \
1469 volatile unsigned long _res; \
1470 _argvec[0] = (unsigned long)_orig.nraddr; \
1471 _argvec[1] = (unsigned long)(arg1); \
1472 _argvec[2] = (unsigned long)(arg2); \
1473 _argvec[3] = (unsigned long)(arg3); \
1474 _argvec[4] = (unsigned long)(arg4); \
1475 _argvec[5] = (unsigned long)(arg5); \
1476 _argvec[6] = (unsigned long)(arg6); \
1477 _argvec[7] = (unsigned long)(arg7); \
1478 __asm__ volatile(VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $136,%%rsp\n\t" \
1479 "pushq 56(%%rax)\n\t" \
1480 "movq 48(%%rax), %%r9\n\t" \
1481 "movq 40(%%rax), %%r8\n\t" \
1482 "movq 32(%%rax), %%rcx\n\t" \
1483 "movq 24(%%rax), %%rdx\n\t" \
1484 "movq 16(%%rax), %%rsi\n\t" \
1485 "movq 8(%%rax), %%rdi\n\t" \
1486 "movq (%%rax), %%rax\n\t"
\
1487 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1489 :
"a"(&_argvec[0])__FRAME_POINTER \
1490 :
"cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"); \
1491 lval = (__typeof__(lval))_res; \
1494#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8) \
1496 volatile OrigFn _orig = (orig); \
1497 volatile unsigned long _argvec[9]; \
1498 volatile unsigned long _res; \
1499 _argvec[0] = (unsigned long)_orig.nraddr; \
1500 _argvec[1] = (unsigned long)(arg1); \
1501 _argvec[2] = (unsigned long)(arg2); \
1502 _argvec[3] = (unsigned long)(arg3); \
1503 _argvec[4] = (unsigned long)(arg4); \
1504 _argvec[5] = (unsigned long)(arg5); \
1505 _argvec[6] = (unsigned long)(arg6); \
1506 _argvec[7] = (unsigned long)(arg7); \
1507 _argvec[8] = (unsigned long)(arg8); \
1508 __asm__ volatile(VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1509 "pushq 64(%%rax)\n\t" \
1510 "pushq 56(%%rax)\n\t" \
1511 "movq 48(%%rax), %%r9\n\t" \
1512 "movq 40(%%rax), %%r8\n\t" \
1513 "movq 32(%%rax), %%rcx\n\t" \
1514 "movq 24(%%rax), %%rdx\n\t" \
1515 "movq 16(%%rax), %%rsi\n\t" \
1516 "movq 8(%%rax), %%rdi\n\t" \
1517 "movq (%%rax), %%rax\n\t"
\
1518 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1520 :
"a"(&_argvec[0])__FRAME_POINTER \
1521 :
"cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"); \
1522 lval = (__typeof__(lval))_res; \
1525#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9) \
1527 volatile OrigFn _orig = (orig); \
1528 volatile unsigned long _argvec[10]; \
1529 volatile unsigned long _res; \
1530 _argvec[0] = (unsigned long)_orig.nraddr; \
1531 _argvec[1] = (unsigned long)(arg1); \
1532 _argvec[2] = (unsigned long)(arg2); \
1533 _argvec[3] = (unsigned long)(arg3); \
1534 _argvec[4] = (unsigned long)(arg4); \
1535 _argvec[5] = (unsigned long)(arg5); \
1536 _argvec[6] = (unsigned long)(arg6); \
1537 _argvec[7] = (unsigned long)(arg7); \
1538 _argvec[8] = (unsigned long)(arg8); \
1539 _argvec[9] = (unsigned long)(arg9); \
1540 __asm__ volatile(VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $136,%%rsp\n\t" \
1541 "pushq 72(%%rax)\n\t" \
1542 "pushq 64(%%rax)\n\t" \
1543 "pushq 56(%%rax)\n\t" \
1544 "movq 48(%%rax), %%r9\n\t" \
1545 "movq 40(%%rax), %%r8\n\t" \
1546 "movq 32(%%rax), %%rcx\n\t" \
1547 "movq 24(%%rax), %%rdx\n\t" \
1548 "movq 16(%%rax), %%rsi\n\t" \
1549 "movq 8(%%rax), %%rdi\n\t" \
1550 "movq (%%rax), %%rax\n\t"
\
1551 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1553 :
"a"(&_argvec[0])__FRAME_POINTER \
1554 :
"cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"); \
1555 lval = (__typeof__(lval))_res; \
1558#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10) \
1560 volatile OrigFn _orig = (orig); \
1561 volatile unsigned long _argvec[11]; \
1562 volatile unsigned long _res; \
1563 _argvec[0] = (unsigned long)_orig.nraddr; \
1564 _argvec[1] = (unsigned long)(arg1); \
1565 _argvec[2] = (unsigned long)(arg2); \
1566 _argvec[3] = (unsigned long)(arg3); \
1567 _argvec[4] = (unsigned long)(arg4); \
1568 _argvec[5] = (unsigned long)(arg5); \
1569 _argvec[6] = (unsigned long)(arg6); \
1570 _argvec[7] = (unsigned long)(arg7); \
1571 _argvec[8] = (unsigned long)(arg8); \
1572 _argvec[9] = (unsigned long)(arg9); \
1573 _argvec[10] = (unsigned long)(arg10); \
1574 __asm__ volatile(VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1575 "pushq 80(%%rax)\n\t" \
1576 "pushq 72(%%rax)\n\t" \
1577 "pushq 64(%%rax)\n\t" \
1578 "pushq 56(%%rax)\n\t" \
1579 "movq 48(%%rax), %%r9\n\t" \
1580 "movq 40(%%rax), %%r8\n\t" \
1581 "movq 32(%%rax), %%rcx\n\t" \
1582 "movq 24(%%rax), %%rdx\n\t" \
1583 "movq 16(%%rax), %%rsi\n\t" \
1584 "movq 8(%%rax), %%rdi\n\t" \
1585 "movq (%%rax), %%rax\n\t"
\
1586 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1588 :
"a"(&_argvec[0])__FRAME_POINTER \
1589 :
"cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"); \
1590 lval = (__typeof__(lval))_res; \
1593#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11) \
1595 volatile OrigFn _orig = (orig); \
1596 volatile unsigned long _argvec[12]; \
1597 volatile unsigned long _res; \
1598 _argvec[0] = (unsigned long)_orig.nraddr; \
1599 _argvec[1] = (unsigned long)(arg1); \
1600 _argvec[2] = (unsigned long)(arg2); \
1601 _argvec[3] = (unsigned long)(arg3); \
1602 _argvec[4] = (unsigned long)(arg4); \
1603 _argvec[5] = (unsigned long)(arg5); \
1604 _argvec[6] = (unsigned long)(arg6); \
1605 _argvec[7] = (unsigned long)(arg7); \
1606 _argvec[8] = (unsigned long)(arg8); \
1607 _argvec[9] = (unsigned long)(arg9); \
1608 _argvec[10] = (unsigned long)(arg10); \
1609 _argvec[11] = (unsigned long)(arg11); \
1610 __asm__ volatile(VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $136,%%rsp\n\t" \
1611 "pushq 88(%%rax)\n\t" \
1612 "pushq 80(%%rax)\n\t" \
1613 "pushq 72(%%rax)\n\t" \
1614 "pushq 64(%%rax)\n\t" \
1615 "pushq 56(%%rax)\n\t" \
1616 "movq 48(%%rax), %%r9\n\t" \
1617 "movq 40(%%rax), %%r8\n\t" \
1618 "movq 32(%%rax), %%rcx\n\t" \
1619 "movq 24(%%rax), %%rdx\n\t" \
1620 "movq 16(%%rax), %%rsi\n\t" \
1621 "movq 8(%%rax), %%rdi\n\t" \
1622 "movq (%%rax), %%rax\n\t"
\
1623 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1625 :
"a"(&_argvec[0])__FRAME_POINTER \
1626 :
"cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"); \
1627 lval = (__typeof__(lval))_res; \
1630#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12) \
1632 volatile OrigFn _orig = (orig); \
1633 volatile unsigned long _argvec[13]; \
1634 volatile unsigned long _res; \
1635 _argvec[0] = (unsigned long)_orig.nraddr; \
1636 _argvec[1] = (unsigned long)(arg1); \
1637 _argvec[2] = (unsigned long)(arg2); \
1638 _argvec[3] = (unsigned long)(arg3); \
1639 _argvec[4] = (unsigned long)(arg4); \
1640 _argvec[5] = (unsigned long)(arg5); \
1641 _argvec[6] = (unsigned long)(arg6); \
1642 _argvec[7] = (unsigned long)(arg7); \
1643 _argvec[8] = (unsigned long)(arg8); \
1644 _argvec[9] = (unsigned long)(arg9); \
1645 _argvec[10] = (unsigned long)(arg10); \
1646 _argvec[11] = (unsigned long)(arg11); \
1647 _argvec[12] = (unsigned long)(arg12); \
1648 __asm__ volatile(VALGRIND_CFI_PROLOGUE VALGRIND_ALIGN_STACK "subq $128,%%rsp\n\t" \
1649 "pushq 96(%%rax)\n\t" \
1650 "pushq 88(%%rax)\n\t" \
1651 "pushq 80(%%rax)\n\t" \
1652 "pushq 72(%%rax)\n\t" \
1653 "pushq 64(%%rax)\n\t" \
1654 "pushq 56(%%rax)\n\t" \
1655 "movq 48(%%rax), %%r9\n\t" \
1656 "movq 40(%%rax), %%r8\n\t" \
1657 "movq 32(%%rax), %%rcx\n\t" \
1658 "movq 24(%%rax), %%rdx\n\t" \
1659 "movq 16(%%rax), %%rsi\n\t" \
1660 "movq 8(%%rax), %%rdi\n\t" \
1661 "movq (%%rax), %%rax\n\t"
\
1662 VALGRIND_CALL_NOREDIR_RAX VALGRIND_RESTORE_STACK VALGRIND_CFI_EPILOGUE \
1664 :
"a"(&_argvec[0])__FRAME_POINTER \
1665 :
"cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"); \
1666 lval = (__typeof__(lval))_res; \
1673#if defined(PLAT_ppc32_linux)
1699#define __CALLER_SAVED_REGS \
1700 "lr", "ctr", "xer", "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", "r0", "r2", "r3", "r4", "r5", "r6", \
1701 "r7", "r8", "r9", "r10", "r11", "r12", "r13"
1708#define VALGRIND_ALIGN_STACK \
1710 "rlwinm 1,1,0,0,27\n\t"
1711#define VALGRIND_RESTORE_STACK "mr 1,28\n\t"
1716#define CALL_FN_W_v(lval, orig) \
1718 volatile OrigFn _orig = (orig); \
1719 volatile unsigned long _argvec[1]; \
1720 volatile unsigned long _res; \
1721 _argvec[0] = (unsigned long)_orig.nraddr; \
1722 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1723 "lwz 11,0(11)\n\t"
\
1724 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1726 :
"r"(&_argvec[0]) \
1727 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
1728 lval = (__typeof__(lval))_res; \
1731#define CALL_FN_W_W(lval, orig, arg1) \
1733 volatile OrigFn _orig = (orig); \
1734 volatile unsigned long _argvec[2]; \
1735 volatile unsigned long _res; \
1736 _argvec[0] = (unsigned long)_orig.nraddr; \
1737 _argvec[1] = (unsigned long)arg1; \
1738 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1740 "lwz 11,0(11)\n\t" \
1741 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1743 :
"r"(&_argvec[0]) \
1744 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
1745 lval = (__typeof__(lval))_res; \
1748#define CALL_FN_W_WW(lval, orig, arg1, arg2) \
1750 volatile OrigFn _orig = (orig); \
1751 volatile unsigned long _argvec[3]; \
1752 volatile unsigned long _res; \
1753 _argvec[0] = (unsigned long)_orig.nraddr; \
1754 _argvec[1] = (unsigned long)arg1; \
1755 _argvec[2] = (unsigned long)arg2; \
1756 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1759 "lwz 11,0(11)\n\t"
\
1760 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1762 :
"r"(&_argvec[0]) \
1763 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
1764 lval = (__typeof__(lval))_res; \
1767#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
1769 volatile OrigFn _orig = (orig); \
1770 volatile unsigned long _argvec[4]; \
1771 volatile unsigned long _res; \
1772 _argvec[0] = (unsigned long)_orig.nraddr; \
1773 _argvec[1] = (unsigned long)arg1; \
1774 _argvec[2] = (unsigned long)arg2; \
1775 _argvec[3] = (unsigned long)arg3; \
1776 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1779 "lwz 5,12(11)\n\t" \
1780 "lwz 11,0(11)\n\t"
\
1781 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1783 :
"r"(&_argvec[0]) \
1784 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
1785 lval = (__typeof__(lval))_res; \
1788#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
1790 volatile OrigFn _orig = (orig); \
1791 volatile unsigned long _argvec[5]; \
1792 volatile unsigned long _res; \
1793 _argvec[0] = (unsigned long)_orig.nraddr; \
1794 _argvec[1] = (unsigned long)arg1; \
1795 _argvec[2] = (unsigned long)arg2; \
1796 _argvec[3] = (unsigned long)arg3; \
1797 _argvec[4] = (unsigned long)arg4; \
1798 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1801 "lwz 5,12(11)\n\t" \
1802 "lwz 6,16(11)\n\t"
\
1803 "lwz 11,0(11)\n\t" \
1804 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1806 :
"r"(&_argvec[0]) \
1807 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
1808 lval = (__typeof__(lval))_res; \
1811#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
1813 volatile OrigFn _orig = (orig); \
1814 volatile unsigned long _argvec[6]; \
1815 volatile unsigned long _res; \
1816 _argvec[0] = (unsigned long)_orig.nraddr; \
1817 _argvec[1] = (unsigned long)arg1; \
1818 _argvec[2] = (unsigned long)arg2; \
1819 _argvec[3] = (unsigned long)arg3; \
1820 _argvec[4] = (unsigned long)arg4; \
1821 _argvec[5] = (unsigned long)arg5; \
1822 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1825 "lwz 5,12(11)\n\t" \
1826 "lwz 6,16(11)\n\t"
\
1827 "lwz 7,20(11)\n\t" \
1828 "lwz 11,0(11)\n\t"
\
1829 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1831 :
"r"(&_argvec[0]) \
1832 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
1833 lval = (__typeof__(lval))_res; \
1836#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6) \
1838 volatile OrigFn _orig = (orig); \
1839 volatile unsigned long _argvec[7]; \
1840 volatile unsigned long _res; \
1841 _argvec[0] = (unsigned long)_orig.nraddr; \
1842 _argvec[1] = (unsigned long)arg1; \
1843 _argvec[2] = (unsigned long)arg2; \
1844 _argvec[3] = (unsigned long)arg3; \
1845 _argvec[4] = (unsigned long)arg4; \
1846 _argvec[5] = (unsigned long)arg5; \
1847 _argvec[6] = (unsigned long)arg6; \
1848 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1851 "lwz 5,12(11)\n\t" \
1852 "lwz 6,16(11)\n\t"
\
1853 "lwz 7,20(11)\n\t" \
1854 "lwz 8,24(11)\n\t" \
1855 "lwz 11,0(11)\n\t"
\
1856 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1858 :
"r"(&_argvec[0]) \
1859 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
1860 lval = (__typeof__(lval))_res; \
1863#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7) \
1865 volatile OrigFn _orig = (orig); \
1866 volatile unsigned long _argvec[8]; \
1867 volatile unsigned long _res; \
1868 _argvec[0] = (unsigned long)_orig.nraddr; \
1869 _argvec[1] = (unsigned long)arg1; \
1870 _argvec[2] = (unsigned long)arg2; \
1871 _argvec[3] = (unsigned long)arg3; \
1872 _argvec[4] = (unsigned long)arg4; \
1873 _argvec[5] = (unsigned long)arg5; \
1874 _argvec[6] = (unsigned long)arg6; \
1875 _argvec[7] = (unsigned long)arg7; \
1876 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1879 "lwz 5,12(11)\n\t" \
1880 "lwz 6,16(11)\n\t"
\
1881 "lwz 7,20(11)\n\t" \
1882 "lwz 8,24(11)\n\t" \
1883 "lwz 9,28(11)\n\t" \
1884 "lwz 11,0(11)\n\t"
\
1885 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1887 :
"r"(&_argvec[0]) \
1888 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
1889 lval = (__typeof__(lval))_res; \
1892#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8) \
1894 volatile OrigFn _orig = (orig); \
1895 volatile unsigned long _argvec[9]; \
1896 volatile unsigned long _res; \
1897 _argvec[0] = (unsigned long)_orig.nraddr; \
1898 _argvec[1] = (unsigned long)arg1; \
1899 _argvec[2] = (unsigned long)arg2; \
1900 _argvec[3] = (unsigned long)arg3; \
1901 _argvec[4] = (unsigned long)arg4; \
1902 _argvec[5] = (unsigned long)arg5; \
1903 _argvec[6] = (unsigned long)arg6; \
1904 _argvec[7] = (unsigned long)arg7; \
1905 _argvec[8] = (unsigned long)arg8; \
1906 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1909 "lwz 5,12(11)\n\t" \
1910 "lwz 6,16(11)\n\t"
\
1911 "lwz 7,20(11)\n\t" \
1912 "lwz 8,24(11)\n\t" \
1913 "lwz 9,28(11)\n\t" \
1914 "lwz 10,32(11)\n\t"
\
1915 "lwz 11,0(11)\n\t" \
1916 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1918 :
"r"(&_argvec[0]) \
1919 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
1920 lval = (__typeof__(lval))_res; \
1923#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9) \
1925 volatile OrigFn _orig = (orig); \
1926 volatile unsigned long _argvec[10]; \
1927 volatile unsigned long _res; \
1928 _argvec[0] = (unsigned long)_orig.nraddr; \
1929 _argvec[1] = (unsigned long)arg1; \
1930 _argvec[2] = (unsigned long)arg2; \
1931 _argvec[3] = (unsigned long)arg3; \
1932 _argvec[4] = (unsigned long)arg4; \
1933 _argvec[5] = (unsigned long)arg5; \
1934 _argvec[6] = (unsigned long)arg6; \
1935 _argvec[7] = (unsigned long)arg7; \
1936 _argvec[8] = (unsigned long)arg8; \
1937 _argvec[9] = (unsigned long)arg9; \
1938 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1939 "addi 1,1,-16\n\t"
\
1940 "lwz 3,36(11)\n\t" \
1944 "lwz 5,12(11)\n\t" \
1945 "lwz 6,16(11)\n\t"
\
1946 "lwz 7,20(11)\n\t" \
1947 "lwz 8,24(11)\n\t" \
1948 "lwz 9,28(11)\n\t" \
1949 "lwz 10,32(11)\n\t"
\
1950 "lwz 11,0(11)\n\t" \
1951 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1953 :
"r"(&_argvec[0]) \
1954 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
1955 lval = (__typeof__(lval))_res; \
1958#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10) \
1960 volatile OrigFn _orig = (orig); \
1961 volatile unsigned long _argvec[11]; \
1962 volatile unsigned long _res; \
1963 _argvec[0] = (unsigned long)_orig.nraddr; \
1964 _argvec[1] = (unsigned long)arg1; \
1965 _argvec[2] = (unsigned long)arg2; \
1966 _argvec[3] = (unsigned long)arg3; \
1967 _argvec[4] = (unsigned long)arg4; \
1968 _argvec[5] = (unsigned long)arg5; \
1969 _argvec[6] = (unsigned long)arg6; \
1970 _argvec[7] = (unsigned long)arg7; \
1971 _argvec[8] = (unsigned long)arg8; \
1972 _argvec[9] = (unsigned long)arg9; \
1973 _argvec[10] = (unsigned long)arg10; \
1974 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
1975 "addi 1,1,-16\n\t"
\
1976 "lwz 3,40(11)\n\t" \
1978 "lwz 3,36(11)\n\t" \
1982 "lwz 5,12(11)\n\t" \
1983 "lwz 6,16(11)\n\t"
\
1984 "lwz 7,20(11)\n\t" \
1985 "lwz 8,24(11)\n\t" \
1986 "lwz 9,28(11)\n\t" \
1987 "lwz 10,32(11)\n\t"
\
1988 "lwz 11,0(11)\n\t" \
1989 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
1991 :
"r"(&_argvec[0]) \
1992 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
1993 lval = (__typeof__(lval))_res; \
1996#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11) \
1998 volatile OrigFn _orig = (orig); \
1999 volatile unsigned long _argvec[12]; \
2000 volatile unsigned long _res; \
2001 _argvec[0] = (unsigned long)_orig.nraddr; \
2002 _argvec[1] = (unsigned long)arg1; \
2003 _argvec[2] = (unsigned long)arg2; \
2004 _argvec[3] = (unsigned long)arg3; \
2005 _argvec[4] = (unsigned long)arg4; \
2006 _argvec[5] = (unsigned long)arg5; \
2007 _argvec[6] = (unsigned long)arg6; \
2008 _argvec[7] = (unsigned long)arg7; \
2009 _argvec[8] = (unsigned long)arg8; \
2010 _argvec[9] = (unsigned long)arg9; \
2011 _argvec[10] = (unsigned long)arg10; \
2012 _argvec[11] = (unsigned long)arg11; \
2013 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2014 "addi 1,1,-32\n\t"
\
2015 "lwz 3,44(11)\n\t" \
2017 "lwz 3,40(11)\n\t" \
2019 "lwz 3,36(11)\n\t" \
2023 "lwz 5,12(11)\n\t" \
2024 "lwz 6,16(11)\n\t"
\
2025 "lwz 7,20(11)\n\t" \
2026 "lwz 8,24(11)\n\t" \
2027 "lwz 9,28(11)\n\t" \
2028 "lwz 10,32(11)\n\t"
\
2029 "lwz 11,0(11)\n\t" \
2030 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
2032 :
"r"(&_argvec[0]) \
2033 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
2034 lval = (__typeof__(lval))_res; \
2037#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12) \
2039 volatile OrigFn _orig = (orig); \
2040 volatile unsigned long _argvec[13]; \
2041 volatile unsigned long _res; \
2042 _argvec[0] = (unsigned long)_orig.nraddr; \
2043 _argvec[1] = (unsigned long)arg1; \
2044 _argvec[2] = (unsigned long)arg2; \
2045 _argvec[3] = (unsigned long)arg3; \
2046 _argvec[4] = (unsigned long)arg4; \
2047 _argvec[5] = (unsigned long)arg5; \
2048 _argvec[6] = (unsigned long)arg6; \
2049 _argvec[7] = (unsigned long)arg7; \
2050 _argvec[8] = (unsigned long)arg8; \
2051 _argvec[9] = (unsigned long)arg9; \
2052 _argvec[10] = (unsigned long)arg10; \
2053 _argvec[11] = (unsigned long)arg11; \
2054 _argvec[12] = (unsigned long)arg12; \
2055 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2056 "addi 1,1,-32\n\t"
\
2057 "lwz 3,48(11)\n\t" \
2059 "lwz 3,44(11)\n\t" \
2061 "lwz 3,40(11)\n\t" \
2063 "lwz 3,36(11)\n\t" \
2067 "lwz 5,12(11)\n\t" \
2068 "lwz 6,16(11)\n\t"
\
2069 "lwz 7,20(11)\n\t" \
2070 "lwz 8,24(11)\n\t" \
2071 "lwz 9,28(11)\n\t" \
2072 "lwz 10,32(11)\n\t"
\
2073 "lwz 11,0(11)\n\t" \
2074 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 VALGRIND_RESTORE_STACK "mr %0,3" \
2076 :
"r"(&_argvec[0]) \
2077 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
2078 lval = (__typeof__(lval))_res; \
2085#if defined(PLAT_ppc64_linux)
2090#define __CALLER_SAVED_REGS \
2091 "lr", "ctr", "xer", "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", "r0", "r2", "r3", "r4", "r5", "r6", \
2092 "r7", "r8", "r9", "r10", "r11", "r12", "r13"
2099#define VALGRIND_ALIGN_STACK \
2101 "rldicr 1,1,0,59\n\t"
2102#define VALGRIND_RESTORE_STACK "mr 1,28\n\t"
2107#define CALL_FN_W_v(lval, orig) \
2109 volatile OrigFn _orig = (orig); \
2110 volatile unsigned long _argvec[3 + 0]; \
2111 volatile unsigned long _res; \
2113 _argvec[1] = (unsigned long)_orig.r2; \
2114 _argvec[2] = (unsigned long)_orig.nraddr; \
2115 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2116 "std 2,-16(11)\n\t"
\
2118 "ld 11, 0(11)\n\t" \
2119 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2121 "ld 2,-16(11)\n\t"
\
2122 VALGRIND_RESTORE_STACK \
2124 :
"r"(&_argvec[2]) \
2125 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
2126 lval = (__typeof__(lval))_res; \
2129#define CALL_FN_W_W(lval, orig, arg1) \
2131 volatile OrigFn _orig = (orig); \
2132 volatile unsigned long _argvec[3 + 1]; \
2133 volatile unsigned long _res; \
2135 _argvec[1] = (unsigned long)_orig.r2; \
2136 _argvec[2] = (unsigned long)_orig.nraddr; \
2137 _argvec[2 + 1] = (unsigned long)arg1; \
2138 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2139 "std 2,-16(11)\n\t"
\
2142 "ld 11, 0(11)\n\t" \
2143 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2145 "ld 2,-16(11)\n\t"
\
2146 VALGRIND_RESTORE_STACK \
2148 :
"r"(&_argvec[2]) \
2149 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
2150 lval = (__typeof__(lval))_res; \
2153#define CALL_FN_W_WW(lval, orig, arg1, arg2) \
2155 volatile OrigFn _orig = (orig); \
2156 volatile unsigned long _argvec[3 + 2]; \
2157 volatile unsigned long _res; \
2159 _argvec[1] = (unsigned long)_orig.r2; \
2160 _argvec[2] = (unsigned long)_orig.nraddr; \
2161 _argvec[2 + 1] = (unsigned long)arg1; \
2162 _argvec[2 + 2] = (unsigned long)arg2; \
2163 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2164 "std 2,-16(11)\n\t"
\
2167 "ld 4, 16(11)\n\t" \
2168 "ld 11, 0(11)\n\t" \
2169 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2171 "ld 2,-16(11)\n\t"
\
2172 VALGRIND_RESTORE_STACK \
2174 :
"r"(&_argvec[2]) \
2175 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
2176 lval = (__typeof__(lval))_res; \
2179#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
2181 volatile OrigFn _orig = (orig); \
2182 volatile unsigned long _argvec[3 + 3]; \
2183 volatile unsigned long _res; \
2185 _argvec[1] = (unsigned long)_orig.r2; \
2186 _argvec[2] = (unsigned long)_orig.nraddr; \
2187 _argvec[2 + 1] = (unsigned long)arg1; \
2188 _argvec[2 + 2] = (unsigned long)arg2; \
2189 _argvec[2 + 3] = (unsigned long)arg3; \
2190 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2191 "std 2,-16(11)\n\t"
\
2194 "ld 4, 16(11)\n\t" \
2195 "ld 5, 24(11)\n\t" \
2196 "ld 11, 0(11)\n\t" \
2197 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2199 "ld 2,-16(11)\n\t"
\
2200 VALGRIND_RESTORE_STACK \
2202 :
"r"(&_argvec[2]) \
2203 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
2204 lval = (__typeof__(lval))_res; \
2207#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
2209 volatile OrigFn _orig = (orig); \
2210 volatile unsigned long _argvec[3 + 4]; \
2211 volatile unsigned long _res; \
2213 _argvec[1] = (unsigned long)_orig.r2; \
2214 _argvec[2] = (unsigned long)_orig.nraddr; \
2215 _argvec[2 + 1] = (unsigned long)arg1; \
2216 _argvec[2 + 2] = (unsigned long)arg2; \
2217 _argvec[2 + 3] = (unsigned long)arg3; \
2218 _argvec[2 + 4] = (unsigned long)arg4; \
2219 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2220 "std 2,-16(11)\n\t"
\
2223 "ld 4, 16(11)\n\t" \
2224 "ld 5, 24(11)\n\t" \
2225 "ld 6, 32(11)\n\t" \
2226 "ld 11, 0(11)\n\t" \
2227 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2229 "ld 2,-16(11)\n\t"
\
2230 VALGRIND_RESTORE_STACK \
2232 :
"r"(&_argvec[2]) \
2233 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
2234 lval = (__typeof__(lval))_res; \
2237#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
2239 volatile OrigFn _orig = (orig); \
2240 volatile unsigned long _argvec[3 + 5]; \
2241 volatile unsigned long _res; \
2243 _argvec[1] = (unsigned long)_orig.r2; \
2244 _argvec[2] = (unsigned long)_orig.nraddr; \
2245 _argvec[2 + 1] = (unsigned long)arg1; \
2246 _argvec[2 + 2] = (unsigned long)arg2; \
2247 _argvec[2 + 3] = (unsigned long)arg3; \
2248 _argvec[2 + 4] = (unsigned long)arg4; \
2249 _argvec[2 + 5] = (unsigned long)arg5; \
2250 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2251 "std 2,-16(11)\n\t"
\
2254 "ld 4, 16(11)\n\t" \
2255 "ld 5, 24(11)\n\t" \
2256 "ld 6, 32(11)\n\t" \
2257 "ld 7, 40(11)\n\t" \
2258 "ld 11, 0(11)\n\t" \
2259 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2261 "ld 2,-16(11)\n\t"
\
2262 VALGRIND_RESTORE_STACK \
2264 :
"r"(&_argvec[2]) \
2265 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
2266 lval = (__typeof__(lval))_res; \
2269#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6) \
2271 volatile OrigFn _orig = (orig); \
2272 volatile unsigned long _argvec[3 + 6]; \
2273 volatile unsigned long _res; \
2275 _argvec[1] = (unsigned long)_orig.r2; \
2276 _argvec[2] = (unsigned long)_orig.nraddr; \
2277 _argvec[2 + 1] = (unsigned long)arg1; \
2278 _argvec[2 + 2] = (unsigned long)arg2; \
2279 _argvec[2 + 3] = (unsigned long)arg3; \
2280 _argvec[2 + 4] = (unsigned long)arg4; \
2281 _argvec[2 + 5] = (unsigned long)arg5; \
2282 _argvec[2 + 6] = (unsigned long)arg6; \
2283 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2284 "std 2,-16(11)\n\t"
\
2287 "ld 4, 16(11)\n\t" \
2288 "ld 5, 24(11)\n\t" \
2289 "ld 6, 32(11)\n\t" \
2290 "ld 7, 40(11)\n\t" \
2291 "ld 8, 48(11)\n\t" \
2292 "ld 11, 0(11)\n\t" \
2293 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2295 "ld 2,-16(11)\n\t"
\
2296 VALGRIND_RESTORE_STACK \
2298 :
"r"(&_argvec[2]) \
2299 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
2300 lval = (__typeof__(lval))_res; \
2303#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7) \
2305 volatile OrigFn _orig = (orig); \
2306 volatile unsigned long _argvec[3 + 7]; \
2307 volatile unsigned long _res; \
2309 _argvec[1] = (unsigned long)_orig.r2; \
2310 _argvec[2] = (unsigned long)_orig.nraddr; \
2311 _argvec[2 + 1] = (unsigned long)arg1; \
2312 _argvec[2 + 2] = (unsigned long)arg2; \
2313 _argvec[2 + 3] = (unsigned long)arg3; \
2314 _argvec[2 + 4] = (unsigned long)arg4; \
2315 _argvec[2 + 5] = (unsigned long)arg5; \
2316 _argvec[2 + 6] = (unsigned long)arg6; \
2317 _argvec[2 + 7] = (unsigned long)arg7; \
2318 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2319 "std 2,-16(11)\n\t"
\
2322 "ld 4, 16(11)\n\t" \
2323 "ld 5, 24(11)\n\t" \
2324 "ld 6, 32(11)\n\t" \
2325 "ld 7, 40(11)\n\t" \
2326 "ld 8, 48(11)\n\t" \
2327 "ld 9, 56(11)\n\t" \
2328 "ld 11, 0(11)\n\t" \
2329 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2331 "ld 2,-16(11)\n\t"
\
2332 VALGRIND_RESTORE_STACK \
2334 :
"r"(&_argvec[2]) \
2335 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
2336 lval = (__typeof__(lval))_res; \
2339#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8) \
2341 volatile OrigFn _orig = (orig); \
2342 volatile unsigned long _argvec[3 + 8]; \
2343 volatile unsigned long _res; \
2345 _argvec[1] = (unsigned long)_orig.r2; \
2346 _argvec[2] = (unsigned long)_orig.nraddr; \
2347 _argvec[2 + 1] = (unsigned long)arg1; \
2348 _argvec[2 + 2] = (unsigned long)arg2; \
2349 _argvec[2 + 3] = (unsigned long)arg3; \
2350 _argvec[2 + 4] = (unsigned long)arg4; \
2351 _argvec[2 + 5] = (unsigned long)arg5; \
2352 _argvec[2 + 6] = (unsigned long)arg6; \
2353 _argvec[2 + 7] = (unsigned long)arg7; \
2354 _argvec[2 + 8] = (unsigned long)arg8; \
2355 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2356 "std 2,-16(11)\n\t"
\
2359 "ld 4, 16(11)\n\t" \
2360 "ld 5, 24(11)\n\t" \
2361 "ld 6, 32(11)\n\t" \
2362 "ld 7, 40(11)\n\t" \
2363 "ld 8, 48(11)\n\t" \
2364 "ld 9, 56(11)\n\t" \
2365 "ld 10, 64(11)\n\t" \
2366 "ld 11, 0(11)\n\t" \
2367 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2369 "ld 2,-16(11)\n\t"
\
2370 VALGRIND_RESTORE_STACK \
2372 :
"r"(&_argvec[2]) \
2373 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
2374 lval = (__typeof__(lval))_res; \
2377#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9) \
2379 volatile OrigFn _orig = (orig); \
2380 volatile unsigned long _argvec[3 + 9]; \
2381 volatile unsigned long _res; \
2383 _argvec[1] = (unsigned long)_orig.r2; \
2384 _argvec[2] = (unsigned long)_orig.nraddr; \
2385 _argvec[2 + 1] = (unsigned long)arg1; \
2386 _argvec[2 + 2] = (unsigned long)arg2; \
2387 _argvec[2 + 3] = (unsigned long)arg3; \
2388 _argvec[2 + 4] = (unsigned long)arg4; \
2389 _argvec[2 + 5] = (unsigned long)arg5; \
2390 _argvec[2 + 6] = (unsigned long)arg6; \
2391 _argvec[2 + 7] = (unsigned long)arg7; \
2392 _argvec[2 + 8] = (unsigned long)arg8; \
2393 _argvec[2 + 9] = (unsigned long)arg9; \
2394 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2395 "std 2,-16(11)\n\t"
\
2397 "addi 1,1,-128\n\t" \
2399 "std 3,112(1)\n\t"
\
2401 "ld 4, 16(11)\n\t" \
2402 "ld 5, 24(11)\n\t" \
2403 "ld 6, 32(11)\n\t" \
2404 "ld 7, 40(11)\n\t" \
2405 "ld 8, 48(11)\n\t" \
2406 "ld 9, 56(11)\n\t" \
2407 "ld 10, 64(11)\n\t" \
2408 "ld 11, 0(11)\n\t" \
2409 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2411 "ld 2,-16(11)\n\t"
\
2412 VALGRIND_RESTORE_STACK \
2414 :
"r"(&_argvec[2]) \
2415 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
2416 lval = (__typeof__(lval))_res; \
2419#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10) \
2421 volatile OrigFn _orig = (orig); \
2422 volatile unsigned long _argvec[3 + 10]; \
2423 volatile unsigned long _res; \
2425 _argvec[1] = (unsigned long)_orig.r2; \
2426 _argvec[2] = (unsigned long)_orig.nraddr; \
2427 _argvec[2 + 1] = (unsigned long)arg1; \
2428 _argvec[2 + 2] = (unsigned long)arg2; \
2429 _argvec[2 + 3] = (unsigned long)arg3; \
2430 _argvec[2 + 4] = (unsigned long)arg4; \
2431 _argvec[2 + 5] = (unsigned long)arg5; \
2432 _argvec[2 + 6] = (unsigned long)arg6; \
2433 _argvec[2 + 7] = (unsigned long)arg7; \
2434 _argvec[2 + 8] = (unsigned long)arg8; \
2435 _argvec[2 + 9] = (unsigned long)arg9; \
2436 _argvec[2 + 10] = (unsigned long)arg10; \
2437 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2438 "std 2,-16(11)\n\t"
\
2440 "addi 1,1,-128\n\t" \
2442 "std 3,120(1)\n\t"
\
2444 "std 3,112(1)\n\t"
\
2446 "ld 4, 16(11)\n\t" \
2447 "ld 5, 24(11)\n\t" \
2448 "ld 6, 32(11)\n\t" \
2449 "ld 7, 40(11)\n\t" \
2450 "ld 8, 48(11)\n\t" \
2451 "ld 9, 56(11)\n\t" \
2452 "ld 10, 64(11)\n\t" \
2453 "ld 11, 0(11)\n\t" \
2454 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2456 "ld 2,-16(11)\n\t"
\
2457 VALGRIND_RESTORE_STACK \
2459 :
"r"(&_argvec[2]) \
2460 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
2461 lval = (__typeof__(lval))_res; \
2464#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11) \
2466 volatile OrigFn _orig = (orig); \
2467 volatile unsigned long _argvec[3 + 11]; \
2468 volatile unsigned long _res; \
2470 _argvec[1] = (unsigned long)_orig.r2; \
2471 _argvec[2] = (unsigned long)_orig.nraddr; \
2472 _argvec[2 + 1] = (unsigned long)arg1; \
2473 _argvec[2 + 2] = (unsigned long)arg2; \
2474 _argvec[2 + 3] = (unsigned long)arg3; \
2475 _argvec[2 + 4] = (unsigned long)arg4; \
2476 _argvec[2 + 5] = (unsigned long)arg5; \
2477 _argvec[2 + 6] = (unsigned long)arg6; \
2478 _argvec[2 + 7] = (unsigned long)arg7; \
2479 _argvec[2 + 8] = (unsigned long)arg8; \
2480 _argvec[2 + 9] = (unsigned long)arg9; \
2481 _argvec[2 + 10] = (unsigned long)arg10; \
2482 _argvec[2 + 11] = (unsigned long)arg11; \
2483 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2484 "std 2,-16(11)\n\t"
\
2486 "addi 1,1,-144\n\t" \
2488 "std 3,128(1)\n\t"
\
2490 "std 3,120(1)\n\t"
\
2492 "std 3,112(1)\n\t"
\
2494 "ld 4, 16(11)\n\t" \
2495 "ld 5, 24(11)\n\t" \
2496 "ld 6, 32(11)\n\t" \
2497 "ld 7, 40(11)\n\t" \
2498 "ld 8, 48(11)\n\t" \
2499 "ld 9, 56(11)\n\t" \
2500 "ld 10, 64(11)\n\t" \
2501 "ld 11, 0(11)\n\t" \
2502 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2504 "ld 2,-16(11)\n\t"
\
2505 VALGRIND_RESTORE_STACK \
2507 :
"r"(&_argvec[2]) \
2508 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
2509 lval = (__typeof__(lval))_res; \
2512#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12) \
2514 volatile OrigFn _orig = (orig); \
2515 volatile unsigned long _argvec[3 + 12]; \
2516 volatile unsigned long _res; \
2518 _argvec[1] = (unsigned long)_orig.r2; \
2519 _argvec[2] = (unsigned long)_orig.nraddr; \
2520 _argvec[2 + 1] = (unsigned long)arg1; \
2521 _argvec[2 + 2] = (unsigned long)arg2; \
2522 _argvec[2 + 3] = (unsigned long)arg3; \
2523 _argvec[2 + 4] = (unsigned long)arg4; \
2524 _argvec[2 + 5] = (unsigned long)arg5; \
2525 _argvec[2 + 6] = (unsigned long)arg6; \
2526 _argvec[2 + 7] = (unsigned long)arg7; \
2527 _argvec[2 + 8] = (unsigned long)arg8; \
2528 _argvec[2 + 9] = (unsigned long)arg9; \
2529 _argvec[2 + 10] = (unsigned long)arg10; \
2530 _argvec[2 + 11] = (unsigned long)arg11; \
2531 _argvec[2 + 12] = (unsigned long)arg12; \
2532 __asm__ volatile(VALGRIND_ALIGN_STACK "mr 11,%1\n\t" \
2533 "std 2,-16(11)\n\t"
\
2535 "addi 1,1,-144\n\t" \
2537 "std 3,136(1)\n\t"
\
2539 "std 3,128(1)\n\t"
\
2541 "std 3,120(1)\n\t"
\
2543 "std 3,112(1)\n\t"
\
2545 "ld 4, 16(11)\n\t" \
2546 "ld 5, 24(11)\n\t" \
2547 "ld 6, 32(11)\n\t" \
2548 "ld 7, 40(11)\n\t" \
2549 "ld 8, 48(11)\n\t" \
2550 "ld 9, 56(11)\n\t" \
2551 "ld 10, 64(11)\n\t" \
2552 "ld 11, 0(11)\n\t" \
2553 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 "mr 11,%1\n\t" \
2555 "ld 2,-16(11)\n\t"
\
2556 VALGRIND_RESTORE_STACK \
2558 :
"r"(&_argvec[2]) \
2559 :
"cc", "memory", __CALLER_SAVED_REGS, "r28"); \
2560 lval = (__typeof__(lval))_res; \
/* arm-linux: function-wrapping glue. Per AAPCS, args 1..4 go in r0..r3;
 * further args are pushed on the stack (8-byte aligned). The target
 * address is loaded from _argvec[0] into r4 and called via the
 * no-redirect branch-and-link; the result comes back in r0.
 * NOTE(review): this chunk appears extraction-damaged: the
 * "do { ... } while (0)" wrappers, the asm output row (: "=r"(_res)),
 * the "mov r10, sp" / "mov sp, r4" lines of VALGRIND_ALIGN_STACK, and
 * single-register "push {r0}" instructions in the odd-arg-count macros
 * are missing. Restore from canonical valgrind.h before compiling. */
2567#if defined(PLAT_arm_linux)
/* Registers the callee may clobber under AAPCS (r0-r3, ip-substitute r4
 * usage here, and lr). */
2570#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3", "r4", "r14"
/* Align sp to 8 bytes, saving the old sp in r10 (restored below).
 * NOTE(review): the surrounding mov instructions look dropped. */
2585#define VALGRIND_ALIGN_STACK \
2588 "bic r4, r4, #7\n\t" \
2590#define VALGRIND_RESTORE_STACK "mov sp, r10\n\t"
/* CALL_FN_W_v: zero-arg call; target -> r4, result <- r0. */
2595#define CALL_FN_W_v(lval, orig) \
2597 volatile OrigFn _orig = (orig); \
2598 volatile unsigned long _argvec[1]; \
2599 volatile unsigned long _res; \
2600 _argvec[0] = (unsigned long)_orig.nraddr; \
2601 __asm__ volatile(VALGRIND_ALIGN_STACK "ldr r4, [%1] \n\t" \
2602 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \
2604 :
"0"(&_argvec[0]) \
2605 :
"cc", "memory", __CALLER_SAVED_REGS, "r10"); \
2606 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_W: one arg; arg1 -> r0. */
2609#define CALL_FN_W_W(lval, orig, arg1) \
2611 volatile OrigFn _orig = (orig); \
2612 volatile unsigned long _argvec[2]; \
2613 volatile unsigned long _res; \
2614 _argvec[0] = (unsigned long)_orig.nraddr; \
2615 _argvec[1] = (unsigned long)(arg1); \
2616 __asm__ volatile(VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \
2617 "ldr r4, [%1] \n\t"
\
2618 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \
2620 :
"0"(&_argvec[0]) \
2621 :
"cc", "memory", __CALLER_SAVED_REGS, "r10"); \
2622 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_WW: two args; arg1 -> r0, arg2 -> r1. */
2625#define CALL_FN_W_WW(lval, orig, arg1, arg2) \
2627 volatile OrigFn _orig = (orig); \
2628 volatile unsigned long _argvec[3]; \
2629 volatile unsigned long _res; \
2630 _argvec[0] = (unsigned long)_orig.nraddr; \
2631 _argvec[1] = (unsigned long)(arg1); \
2632 _argvec[2] = (unsigned long)(arg2); \
2633 __asm__ volatile(VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \
2634 "ldr r1, [%1, #8] \n\t" \
2635 "ldr r4, [%1] \n\t"
\
2636 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \
2638 :
"0"(&_argvec[0]) \
2639 :
"cc", "memory", __CALLER_SAVED_REGS, "r10"); \
2640 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_WWW: three args in r0..r2. */
2643#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
2645 volatile OrigFn _orig = (orig); \
2646 volatile unsigned long _argvec[4]; \
2647 volatile unsigned long _res; \
2648 _argvec[0] = (unsigned long)_orig.nraddr; \
2649 _argvec[1] = (unsigned long)(arg1); \
2650 _argvec[2] = (unsigned long)(arg2); \
2651 _argvec[3] = (unsigned long)(arg3); \
2652 __asm__ volatile(VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \
2653 "ldr r1, [%1, #8] \n\t" \
2654 "ldr r2, [%1, #12] \n\t" \
2655 "ldr r4, [%1] \n\t"
\
2656 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0\n" \
2658 :
"0"(&_argvec[0]) \
2659 :
"cc", "memory", __CALLER_SAVED_REGS, "r10"); \
2660 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_WWWW: four args in r0..r3 (all register args used). */
2663#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
2665 volatile OrigFn _orig = (orig); \
2666 volatile unsigned long _argvec[5]; \
2667 volatile unsigned long _res; \
2668 _argvec[0] = (unsigned long)_orig.nraddr; \
2669 _argvec[1] = (unsigned long)(arg1); \
2670 _argvec[2] = (unsigned long)(arg2); \
2671 _argvec[3] = (unsigned long)(arg3); \
2672 _argvec[4] = (unsigned long)(arg4); \
2673 __asm__ volatile(VALGRIND_ALIGN_STACK "ldr r0, [%1, #4] \n\t" \
2674 "ldr r1, [%1, #8] \n\t" \
2675 "ldr r2, [%1, #12] \n\t" \
2676 "ldr r3, [%1, #16] \n\t" \
2677 "ldr r4, [%1] \n\t"
\
2678 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2680 :
"0"(&_argvec[0]) \
2681 :
"cc", "memory", __CALLER_SAVED_REGS, "r10"); \
2682 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_5W: arg5 goes on the stack (sub #4 pads to 8-byte align).
 * NOTE(review): the "push {r0}" that should follow "ldr r0, [%1, #20]"
 * appears to have been dropped by extraction. */
2685#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
2687 volatile OrigFn _orig = (orig); \
2688 volatile unsigned long _argvec[6]; \
2689 volatile unsigned long _res; \
2690 _argvec[0] = (unsigned long)_orig.nraddr; \
2691 _argvec[1] = (unsigned long)(arg1); \
2692 _argvec[2] = (unsigned long)(arg2); \
2693 _argvec[3] = (unsigned long)(arg3); \
2694 _argvec[4] = (unsigned long)(arg4); \
2695 _argvec[5] = (unsigned long)(arg5); \
2696 __asm__ volatile(VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \
2697 "ldr r0, [%1, #20] \n\t" \
2699 "ldr r0, [%1, #4] \n\t" \
2700 "ldr r1, [%1, #8] \n\t" \
2701 "ldr r2, [%1, #12] \n\t" \
2702 "ldr r3, [%1, #16] \n\t" \
2703 "ldr r4, [%1] \n\t"
\
2704 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2706 :
"0"(&_argvec[0]) \
2707 :
"cc", "memory", __CALLER_SAVED_REGS, "r10"); \
2708 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_6W: args 5-6 pushed as a pair (stack stays 8-aligned). */
2711#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6) \
2713 volatile OrigFn _orig = (orig); \
2714 volatile unsigned long _argvec[7]; \
2715 volatile unsigned long _res; \
2716 _argvec[0] = (unsigned long)_orig.nraddr; \
2717 _argvec[1] = (unsigned long)(arg1); \
2718 _argvec[2] = (unsigned long)(arg2); \
2719 _argvec[3] = (unsigned long)(arg3); \
2720 _argvec[4] = (unsigned long)(arg4); \
2721 _argvec[5] = (unsigned long)(arg5); \
2722 _argvec[6] = (unsigned long)(arg6); \
2723 __asm__ volatile(VALGRIND_ALIGN_STACK "ldr r0, [%1, #20] \n\t" \
2724 "ldr r1, [%1, #24] \n\t" \
2725 "push {r0, r1} \n\t" \
2726 "ldr r0, [%1, #4] \n\t" \
2727 "ldr r1, [%1, #8] \n\t" \
2728 "ldr r2, [%1, #12] \n\t" \
2729 "ldr r3, [%1, #16] \n\t" \
2730 "ldr r4, [%1] \n\t"
\
2731 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2733 :
"0"(&_argvec[0]) \
2734 :
"cc", "memory", __CALLER_SAVED_REGS, "r10"); \
2735 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_7W: pad 4 bytes, then push args 5-7. */
2738#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7) \
2740 volatile OrigFn _orig = (orig); \
2741 volatile unsigned long _argvec[8]; \
2742 volatile unsigned long _res; \
2743 _argvec[0] = (unsigned long)_orig.nraddr; \
2744 _argvec[1] = (unsigned long)(arg1); \
2745 _argvec[2] = (unsigned long)(arg2); \
2746 _argvec[3] = (unsigned long)(arg3); \
2747 _argvec[4] = (unsigned long)(arg4); \
2748 _argvec[5] = (unsigned long)(arg5); \
2749 _argvec[6] = (unsigned long)(arg6); \
2750 _argvec[7] = (unsigned long)(arg7); \
2751 __asm__ volatile(VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \
2752 "ldr r0, [%1, #20] \n\t" \
2753 "ldr r1, [%1, #24] \n\t" \
2754 "ldr r2, [%1, #28] \n\t" \
2755 "push {r0, r1, r2} \n\t" \
2756 "ldr r0, [%1, #4] \n\t" \
2757 "ldr r1, [%1, #8] \n\t" \
2758 "ldr r2, [%1, #12] \n\t" \
2759 "ldr r3, [%1, #16] \n\t" \
2760 "ldr r4, [%1] \n\t"
\
2761 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2763 :
"0"(&_argvec[0]) \
2764 :
"cc", "memory", __CALLER_SAVED_REGS, "r10"); \
2765 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_8W: args 5-8 pushed as a 4-register group. */
2768#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8) \
2770 volatile OrigFn _orig = (orig); \
2771 volatile unsigned long _argvec[9]; \
2772 volatile unsigned long _res; \
2773 _argvec[0] = (unsigned long)_orig.nraddr; \
2774 _argvec[1] = (unsigned long)(arg1); \
2775 _argvec[2] = (unsigned long)(arg2); \
2776 _argvec[3] = (unsigned long)(arg3); \
2777 _argvec[4] = (unsigned long)(arg4); \
2778 _argvec[5] = (unsigned long)(arg5); \
2779 _argvec[6] = (unsigned long)(arg6); \
2780 _argvec[7] = (unsigned long)(arg7); \
2781 _argvec[8] = (unsigned long)(arg8); \
2782 __asm__ volatile(VALGRIND_ALIGN_STACK "ldr r0, [%1, #20] \n\t" \
2783 "ldr r1, [%1, #24] \n\t" \
2784 "ldr r2, [%1, #28] \n\t" \
2785 "ldr r3, [%1, #32] \n\t" \
2786 "push {r0, r1, r2, r3} \n\t" \
2787 "ldr r0, [%1, #4] \n\t" \
2788 "ldr r1, [%1, #8] \n\t" \
2789 "ldr r2, [%1, #12] \n\t" \
2790 "ldr r3, [%1, #16] \n\t" \
2791 "ldr r4, [%1] \n\t"
\
2792 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2794 :
"0"(&_argvec[0]) \
2795 :
"cc", "memory", __CALLER_SAVED_REGS, "r10"); \
2796 lval = (__typeof__(lval))_res; \
/* arm-linux: CALL_FN_W_9W..12W — stack-arg variants for 9..12 args.
 * Args 1-4 go in r0..r3 per AAPCS; args 5+ are pushed in descending
 * groups; odd counts pad sp by 4 first to keep 8-byte alignment.
 * NOTE(review): extraction damage as elsewhere in this chunk: no
 * "do { ... } while (0)" wrappers, the asm output row (: "=r"(_res)) is
 * missing, and in CALL_FN_W_10W the "push {r0}" after
 * "ldr r0, [%1, #40]" appears dropped. Restore from upstream. */
2799#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9) \
2801 volatile OrigFn _orig = (orig); \
2802 volatile unsigned long _argvec[10]; \
2803 volatile unsigned long _res; \
2804 _argvec[0] = (unsigned long)_orig.nraddr; \
2805 _argvec[1] = (unsigned long)(arg1); \
2806 _argvec[2] = (unsigned long)(arg2); \
2807 _argvec[3] = (unsigned long)(arg3); \
2808 _argvec[4] = (unsigned long)(arg4); \
2809 _argvec[5] = (unsigned long)(arg5); \
2810 _argvec[6] = (unsigned long)(arg6); \
2811 _argvec[7] = (unsigned long)(arg7); \
2812 _argvec[8] = (unsigned long)(arg8); \
2813 _argvec[9] = (unsigned long)(arg9); \
2814 __asm__ volatile(VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \
2815 "ldr r0, [%1, #20] \n\t" \
2816 "ldr r1, [%1, #24] \n\t" \
2817 "ldr r2, [%1, #28] \n\t" \
2818 "ldr r3, [%1, #32] \n\t" \
2819 "ldr r4, [%1, #36] \n\t" \
2820 "push {r0, r1, r2, r3, r4} \n\t" \
2821 "ldr r0, [%1, #4] \n\t" \
2822 "ldr r1, [%1, #8] \n\t" \
2823 "ldr r2, [%1, #12] \n\t" \
2824 "ldr r3, [%1, #16] \n\t" \
2825 "ldr r4, [%1] \n\t"
\
2826 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2828 :
"0"(&_argvec[0]) \
2829 :
"cc", "memory", __CALLER_SAVED_REGS, "r10"); \
2830 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_10W: arg10 pushed alone, then args 5-9 as a group.
 * NOTE(review): the single "push {r0}" for arg10 looks dropped. */
2833#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10) \
2835 volatile OrigFn _orig = (orig); \
2836 volatile unsigned long _argvec[11]; \
2837 volatile unsigned long _res; \
2838 _argvec[0] = (unsigned long)_orig.nraddr; \
2839 _argvec[1] = (unsigned long)(arg1); \
2840 _argvec[2] = (unsigned long)(arg2); \
2841 _argvec[3] = (unsigned long)(arg3); \
2842 _argvec[4] = (unsigned long)(arg4); \
2843 _argvec[5] = (unsigned long)(arg5); \
2844 _argvec[6] = (unsigned long)(arg6); \
2845 _argvec[7] = (unsigned long)(arg7); \
2846 _argvec[8] = (unsigned long)(arg8); \
2847 _argvec[9] = (unsigned long)(arg9); \
2848 _argvec[10] = (unsigned long)(arg10); \
2849 __asm__ volatile(VALGRIND_ALIGN_STACK "ldr r0, [%1, #40] \n\t" \
2851 "ldr r0, [%1, #20] \n\t" \
2852 "ldr r1, [%1, #24] \n\t" \
2853 "ldr r2, [%1, #28] \n\t" \
2854 "ldr r3, [%1, #32] \n\t" \
2855 "ldr r4, [%1, #36] \n\t" \
2856 "push {r0, r1, r2, r3, r4} \n\t" \
2857 "ldr r0, [%1, #4] \n\t" \
2858 "ldr r1, [%1, #8] \n\t" \
2859 "ldr r2, [%1, #12] \n\t" \
2860 "ldr r3, [%1, #16] \n\t" \
2861 "ldr r4, [%1] \n\t"
\
2862 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2864 :
"0"(&_argvec[0]) \
2865 :
"cc", "memory", __CALLER_SAVED_REGS, "r10"); \
2866 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_11W: pad 4, push args 10-11, then args 5-9. */
2869#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11) \
2871 volatile OrigFn _orig = (orig); \
2872 volatile unsigned long _argvec[12]; \
2873 volatile unsigned long _res; \
2874 _argvec[0] = (unsigned long)_orig.nraddr; \
2875 _argvec[1] = (unsigned long)(arg1); \
2876 _argvec[2] = (unsigned long)(arg2); \
2877 _argvec[3] = (unsigned long)(arg3); \
2878 _argvec[4] = (unsigned long)(arg4); \
2879 _argvec[5] = (unsigned long)(arg5); \
2880 _argvec[6] = (unsigned long)(arg6); \
2881 _argvec[7] = (unsigned long)(arg7); \
2882 _argvec[8] = (unsigned long)(arg8); \
2883 _argvec[9] = (unsigned long)(arg9); \
2884 _argvec[10] = (unsigned long)(arg10); \
2885 _argvec[11] = (unsigned long)(arg11); \
2886 __asm__ volatile(VALGRIND_ALIGN_STACK "sub sp, sp, #4 \n\t" \
2887 "ldr r0, [%1, #40] \n\t" \
2888 "ldr r1, [%1, #44] \n\t" \
2889 "push {r0, r1} \n\t" \
2890 "ldr r0, [%1, #20] \n\t" \
2891 "ldr r1, [%1, #24] \n\t" \
2892 "ldr r2, [%1, #28] \n\t" \
2893 "ldr r3, [%1, #32] \n\t" \
2894 "ldr r4, [%1, #36] \n\t" \
2895 "push {r0, r1, r2, r3, r4} \n\t" \
2896 "ldr r0, [%1, #4] \n\t" \
2897 "ldr r1, [%1, #8] \n\t" \
2898 "ldr r2, [%1, #12] \n\t" \
2899 "ldr r3, [%1, #16] \n\t" \
2900 "ldr r4, [%1] \n\t"
\
2901 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2903 :
"0"(&_argvec[0]) \
2904 :
"cc", "memory", __CALLER_SAVED_REGS, "r10"); \
2905 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_12W: push args 10-12, then args 5-9 (8 stack words). */
2908#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12) \
2910 volatile OrigFn _orig = (orig); \
2911 volatile unsigned long _argvec[13]; \
2912 volatile unsigned long _res; \
2913 _argvec[0] = (unsigned long)_orig.nraddr; \
2914 _argvec[1] = (unsigned long)(arg1); \
2915 _argvec[2] = (unsigned long)(arg2); \
2916 _argvec[3] = (unsigned long)(arg3); \
2917 _argvec[4] = (unsigned long)(arg4); \
2918 _argvec[5] = (unsigned long)(arg5); \
2919 _argvec[6] = (unsigned long)(arg6); \
2920 _argvec[7] = (unsigned long)(arg7); \
2921 _argvec[8] = (unsigned long)(arg8); \
2922 _argvec[9] = (unsigned long)(arg9); \
2923 _argvec[10] = (unsigned long)(arg10); \
2924 _argvec[11] = (unsigned long)(arg11); \
2925 _argvec[12] = (unsigned long)(arg12); \
2926 __asm__ volatile(VALGRIND_ALIGN_STACK "ldr r0, [%1, #40] \n\t" \
2927 "ldr r1, [%1, #44] \n\t" \
2928 "ldr r2, [%1, #48] \n\t" \
2929 "push {r0, r1, r2} \n\t" \
2930 "ldr r0, [%1, #20] \n\t" \
2931 "ldr r1, [%1, #24] \n\t" \
2932 "ldr r2, [%1, #28] \n\t" \
2933 "ldr r3, [%1, #32] \n\t" \
2934 "ldr r4, [%1, #36] \n\t" \
2935 "push {r0, r1, r2, r3, r4} \n\t" \
2936 "ldr r0, [%1, #4] \n\t" \
2937 "ldr r1, [%1, #8] \n\t" \
2938 "ldr r2, [%1, #12] \n\t" \
2939 "ldr r3, [%1, #16] \n\t" \
2940 "ldr r4, [%1] \n\t"
\
2941 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 VALGRIND_RESTORE_STACK "mov %0, r0" \
2943 :
"0"(&_argvec[0]) \
2944 :
"cc", "memory", __CALLER_SAVED_REGS, "r10"); \
2945 lval = (__typeof__(lval))_res; \
/* s390x-linux: function-wrapping glue. r1 holds &_argvec (made a CFI
 * frame pointer when DWARF CFI asm is available); r15 is the stack
 * pointer; args go in r2..r6, excess args are copied (mvc) into the
 * register save area above the 160-byte ABI frame; result returns in r2.
 * NOTE(review): extraction damage apparent throughout this section:
 * the "do { ... } while (0)" wrappers, the asm output row
 * (: "=d"(_res)), the "lg 2, 8(1)" ... "lg 6, 40(1)" argument loads
 * between the "aghi 15,-N" and "lg 1, 0(1)", and the "#else" between
 * the CFI and non-CFI define groups appear to be missing. */
2952#if defined(PLAT_s390x_linux)
2958#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
/* Extra input operand: pass the CFA so the CFI directives are honest. */
2959#define __FRAME_POINTER , "d"(__builtin_dwarf_cfa())
2960#define VALGRIND_CFI_PROLOGUE \
2961 ".cfi_remember_state\n\t" \
2965 ".cfi_def_cfa r11, 0\n\t"
2966#define VALGRIND_CFI_EPILOGUE \
2968 ".cfi_restore_state\n\t"
/* Non-CFI fallback: just copy &_argvec into r1.
 * NOTE(review): an "#else" should separate this group from the one
 * above — it looks lost in extraction. */
2970#define __FRAME_POINTER
2971#define VALGRIND_CFI_PROLOGUE "lgr 1,%1\n\t"
2972#define VALGRIND_CFI_EPILOGUE
/* Registers (GPRs and FPRs) the callee may clobber per the zSeries ABI. */
2984#define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7"
/* CALL_FN_W_v: zero-arg call; 160-byte ABI frame; target <- 0(r1). */
2995#define CALL_FN_W_v(lval, orig) \
2997 volatile OrigFn _orig = (orig); \
2998 volatile unsigned long _argvec[1]; \
2999 volatile unsigned long _res; \
3000 _argvec[0] = (unsigned long)_orig.nraddr; \
3001 __asm__ volatile(VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3003 VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3004 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3006 :
"d"(&_argvec[0])__FRAME_POINTER \
3007 :
"cc", "memory", __CALLER_SAVED_REGS, "7"); \
3008 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_W: one arg -> r2 (the "lg 2, 8(1)" load looks dropped). */
3012#define CALL_FN_W_W(lval, orig, arg1) \
3014 volatile OrigFn _orig = (orig); \
3015 volatile unsigned long _argvec[2]; \
3016 volatile unsigned long _res; \
3017 _argvec[0] = (unsigned long)_orig.nraddr; \
3018 _argvec[1] = (unsigned long)arg1; \
3019 __asm__ volatile(VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3021 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3022 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3024 :
"a"(&_argvec[0])__FRAME_POINTER \
3025 :
"cc", "memory", __CALLER_SAVED_REGS, "7"); \
3026 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_WW: two args -> r2, r3. */
3029#define CALL_FN_W_WW(lval, orig, arg1, arg2) \
3031 volatile OrigFn _orig = (orig); \
3032 volatile unsigned long _argvec[3]; \
3033 volatile unsigned long _res; \
3034 _argvec[0] = (unsigned long)_orig.nraddr; \
3035 _argvec[1] = (unsigned long)arg1; \
3036 _argvec[2] = (unsigned long)arg2; \
3037 __asm__ volatile(VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3040 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3041 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3043 :
"a"(&_argvec[0])__FRAME_POINTER \
3044 :
"cc", "memory", __CALLER_SAVED_REGS, "7"); \
3045 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_WWW: three args -> r2..r4. */
3048#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
3050 volatile OrigFn _orig = (orig); \
3051 volatile unsigned long _argvec[4]; \
3052 volatile unsigned long _res; \
3053 _argvec[0] = (unsigned long)_orig.nraddr; \
3054 _argvec[1] = (unsigned long)arg1; \
3055 _argvec[2] = (unsigned long)arg2; \
3056 _argvec[3] = (unsigned long)arg3; \
3057 __asm__ volatile(VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3061 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3062 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3064 :
"a"(&_argvec[0])__FRAME_POINTER \
3065 :
"cc", "memory", __CALLER_SAVED_REGS, "7"); \
3066 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_WWWW: four args -> r2..r5. */
3069#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
3071 volatile OrigFn _orig = (orig); \
3072 volatile unsigned long _argvec[5]; \
3073 volatile unsigned long _res; \
3074 _argvec[0] = (unsigned long)_orig.nraddr; \
3075 _argvec[1] = (unsigned long)arg1; \
3076 _argvec[2] = (unsigned long)arg2; \
3077 _argvec[3] = (unsigned long)arg3; \
3078 _argvec[4] = (unsigned long)arg4; \
3079 __asm__ volatile(VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3084 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3085 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3087 :
"a"(&_argvec[0])__FRAME_POINTER \
3088 :
"cc", "memory", __CALLER_SAVED_REGS, "7"); \
3089 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_5W: five args -> r2..r6; r6 added to the clobber list. */
3092#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
3094 volatile OrigFn _orig = (orig); \
3095 volatile unsigned long _argvec[6]; \
3096 volatile unsigned long _res; \
3097 _argvec[0] = (unsigned long)_orig.nraddr; \
3098 _argvec[1] = (unsigned long)arg1; \
3099 _argvec[2] = (unsigned long)arg2; \
3100 _argvec[3] = (unsigned long)arg3; \
3101 _argvec[4] = (unsigned long)arg4; \
3102 _argvec[5] = (unsigned long)arg5; \
3103 __asm__ volatile(VALGRIND_CFI_PROLOGUE "aghi 15,-160\n\t" \
3109 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3110 "aghi 15,160\n\t" VALGRIND_CFI_EPILOGUE \
3112 :
"a"(&_argvec[0])__FRAME_POINTER \
3113 :
"cc", "memory", __CALLER_SAVED_REGS, "6", "7"); \
3114 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_6W: arg6 copied to stack slot 160(r15); frame grows to 168. */
3117#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6) \
3119 volatile OrigFn _orig = (orig); \
3120 volatile unsigned long _argvec[7]; \
3121 volatile unsigned long _res; \
3122 _argvec[0] = (unsigned long)_orig.nraddr; \
3123 _argvec[1] = (unsigned long)arg1; \
3124 _argvec[2] = (unsigned long)arg2; \
3125 _argvec[3] = (unsigned long)arg3; \
3126 _argvec[4] = (unsigned long)arg4; \
3127 _argvec[5] = (unsigned long)arg5; \
3128 _argvec[6] = (unsigned long)arg6; \
3129 __asm__ volatile(VALGRIND_CFI_PROLOGUE "aghi 15,-168\n\t" \
3135 "mvc 160(8,15), 48(1)\n\t" \
3136 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3137 "aghi 15,168\n\t" VALGRIND_CFI_EPILOGUE \
3139 :
"a"(&_argvec[0])__FRAME_POINTER \
3140 :
"cc", "memory", __CALLER_SAVED_REGS, "6", "7"); \
3141 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_7W: args 6-7 copied to 160/168(r15); 176-byte frame. */
3144#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7) \
3146 volatile OrigFn _orig = (orig); \
3147 volatile unsigned long _argvec[8]; \
3148 volatile unsigned long _res; \
3149 _argvec[0] = (unsigned long)_orig.nraddr; \
3150 _argvec[1] = (unsigned long)arg1; \
3151 _argvec[2] = (unsigned long)arg2; \
3152 _argvec[3] = (unsigned long)arg3; \
3153 _argvec[4] = (unsigned long)arg4; \
3154 _argvec[5] = (unsigned long)arg5; \
3155 _argvec[6] = (unsigned long)arg6; \
3156 _argvec[7] = (unsigned long)arg7; \
3157 __asm__ volatile(VALGRIND_CFI_PROLOGUE "aghi 15,-176\n\t" \
3163 "mvc 160(8,15), 48(1)\n\t" \
3164 "mvc 168(8,15), 56(1)\n\t" \
3165 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3166 "aghi 15,176\n\t" VALGRIND_CFI_EPILOGUE \
3168 :
"a"(&_argvec[0])__FRAME_POINTER \
3169 :
"cc", "memory", __CALLER_SAVED_REGS, "6", "7"); \
3170 lval = (__typeof__(lval))_res; \
/* s390x-linux: CALL_FN_W_8W..12W — args 1-5 in r2..r6, args 6..n copied
 * (mvc, 8 bytes each) into stack slots 160, 168, ... above the ABI
 * frame, which grows by 8 per extra arg (184/192/200/208/216).
 * NOTE(review): as elsewhere in this extraction, the "do {...} while (0)"
 * wrappers, the asm output row (: "=d"(_res)) and the "lg 2..6, NN(1)"
 * register-argument loads appear to be missing; restore from the
 * canonical valgrind.h before compiling. */
3173#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8) \
3175 volatile OrigFn _orig = (orig); \
3176 volatile unsigned long _argvec[9]; \
3177 volatile unsigned long _res; \
3178 _argvec[0] = (unsigned long)_orig.nraddr; \
3179 _argvec[1] = (unsigned long)arg1; \
3180 _argvec[2] = (unsigned long)arg2; \
3181 _argvec[3] = (unsigned long)arg3; \
3182 _argvec[4] = (unsigned long)arg4; \
3183 _argvec[5] = (unsigned long)arg5; \
3184 _argvec[6] = (unsigned long)arg6; \
3185 _argvec[7] = (unsigned long)arg7; \
3186 _argvec[8] = (unsigned long)arg8; \
3187 __asm__ volatile(VALGRIND_CFI_PROLOGUE "aghi 15,-184\n\t" \
3193 "mvc 160(8,15), 48(1)\n\t" \
3194 "mvc 168(8,15), 56(1)\n\t" \
3195 "mvc 176(8,15), 64(1)\n\t" \
3196 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3197 "aghi 15,184\n\t" VALGRIND_CFI_EPILOGUE \
3199 :
"a"(&_argvec[0])__FRAME_POINTER \
3200 :
"cc", "memory", __CALLER_SAVED_REGS, "6", "7"); \
3201 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_9W: four stack args (slots 160..184); 192-byte frame. */
3204#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9) \
3206 volatile OrigFn _orig = (orig); \
3207 volatile unsigned long _argvec[10]; \
3208 volatile unsigned long _res; \
3209 _argvec[0] = (unsigned long)_orig.nraddr; \
3210 _argvec[1] = (unsigned long)arg1; \
3211 _argvec[2] = (unsigned long)arg2; \
3212 _argvec[3] = (unsigned long)arg3; \
3213 _argvec[4] = (unsigned long)arg4; \
3214 _argvec[5] = (unsigned long)arg5; \
3215 _argvec[6] = (unsigned long)arg6; \
3216 _argvec[7] = (unsigned long)arg7; \
3217 _argvec[8] = (unsigned long)arg8; \
3218 _argvec[9] = (unsigned long)arg9; \
3219 __asm__ volatile(VALGRIND_CFI_PROLOGUE "aghi 15,-192\n\t" \
3225 "mvc 160(8,15), 48(1)\n\t" \
3226 "mvc 168(8,15), 56(1)\n\t" \
3227 "mvc 176(8,15), 64(1)\n\t" \
3228 "mvc 184(8,15), 72(1)\n\t" \
3229 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3230 "aghi 15,192\n\t" VALGRIND_CFI_EPILOGUE \
3232 :
"a"(&_argvec[0])__FRAME_POINTER \
3233 :
"cc", "memory", __CALLER_SAVED_REGS, "6", "7"); \
3234 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_10W: five stack args; 200-byte frame. */
3237#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10) \
3239 volatile OrigFn _orig = (orig); \
3240 volatile unsigned long _argvec[11]; \
3241 volatile unsigned long _res; \
3242 _argvec[0] = (unsigned long)_orig.nraddr; \
3243 _argvec[1] = (unsigned long)arg1; \
3244 _argvec[2] = (unsigned long)arg2; \
3245 _argvec[3] = (unsigned long)arg3; \
3246 _argvec[4] = (unsigned long)arg4; \
3247 _argvec[5] = (unsigned long)arg5; \
3248 _argvec[6] = (unsigned long)arg6; \
3249 _argvec[7] = (unsigned long)arg7; \
3250 _argvec[8] = (unsigned long)arg8; \
3251 _argvec[9] = (unsigned long)arg9; \
3252 _argvec[10] = (unsigned long)arg10; \
3253 __asm__ volatile(VALGRIND_CFI_PROLOGUE "aghi 15,-200\n\t" \
3259 "mvc 160(8,15), 48(1)\n\t" \
3260 "mvc 168(8,15), 56(1)\n\t" \
3261 "mvc 176(8,15), 64(1)\n\t" \
3262 "mvc 184(8,15), 72(1)\n\t" \
3263 "mvc 192(8,15), 80(1)\n\t" \
3264 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3265 "aghi 15,200\n\t" VALGRIND_CFI_EPILOGUE \
3267 :
"a"(&_argvec[0])__FRAME_POINTER \
3268 :
"cc", "memory", __CALLER_SAVED_REGS, "6", "7"); \
3269 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_11W: six stack args; 208-byte frame. */
3272#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11) \
3274 volatile OrigFn _orig = (orig); \
3275 volatile unsigned long _argvec[12]; \
3276 volatile unsigned long _res; \
3277 _argvec[0] = (unsigned long)_orig.nraddr; \
3278 _argvec[1] = (unsigned long)arg1; \
3279 _argvec[2] = (unsigned long)arg2; \
3280 _argvec[3] = (unsigned long)arg3; \
3281 _argvec[4] = (unsigned long)arg4; \
3282 _argvec[5] = (unsigned long)arg5; \
3283 _argvec[6] = (unsigned long)arg6; \
3284 _argvec[7] = (unsigned long)arg7; \
3285 _argvec[8] = (unsigned long)arg8; \
3286 _argvec[9] = (unsigned long)arg9; \
3287 _argvec[10] = (unsigned long)arg10; \
3288 _argvec[11] = (unsigned long)arg11; \
3289 __asm__ volatile(VALGRIND_CFI_PROLOGUE "aghi 15,-208\n\t" \
3295 "mvc 160(8,15), 48(1)\n\t" \
3296 "mvc 168(8,15), 56(1)\n\t" \
3297 "mvc 176(8,15), 64(1)\n\t" \
3298 "mvc 184(8,15), 72(1)\n\t" \
3299 "mvc 192(8,15), 80(1)\n\t" \
3300 "mvc 200(8,15), 88(1)\n\t" \
3301 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3302 "aghi 15,208\n\t" VALGRIND_CFI_EPILOGUE \
3304 :
"a"(&_argvec[0])__FRAME_POINTER \
3305 :
"cc", "memory", __CALLER_SAVED_REGS, "6", "7"); \
3306 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_12W: seven stack args; 216-byte frame. */
3309#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12) \
3311 volatile OrigFn _orig = (orig); \
3312 volatile unsigned long _argvec[13]; \
3313 volatile unsigned long _res; \
3314 _argvec[0] = (unsigned long)_orig.nraddr; \
3315 _argvec[1] = (unsigned long)arg1; \
3316 _argvec[2] = (unsigned long)arg2; \
3317 _argvec[3] = (unsigned long)arg3; \
3318 _argvec[4] = (unsigned long)arg4; \
3319 _argvec[5] = (unsigned long)arg5; \
3320 _argvec[6] = (unsigned long)arg6; \
3321 _argvec[7] = (unsigned long)arg7; \
3322 _argvec[8] = (unsigned long)arg8; \
3323 _argvec[9] = (unsigned long)arg9; \
3324 _argvec[10] = (unsigned long)arg10; \
3325 _argvec[11] = (unsigned long)arg11; \
3326 _argvec[12] = (unsigned long)arg12; \
3327 __asm__ volatile(VALGRIND_CFI_PROLOGUE "aghi 15,-216\n\t" \
3333 "mvc 160(8,15), 48(1)\n\t" \
3334 "mvc 168(8,15), 56(1)\n\t" \
3335 "mvc 176(8,15), 64(1)\n\t" \
3336 "mvc 184(8,15), 72(1)\n\t" \
3337 "mvc 192(8,15), 80(1)\n\t" \
3338 "mvc 200(8,15), 88(1)\n\t" \
3339 "mvc 208(8,15), 96(1)\n\t" \
3340 "lg 1, 0(1)\n\t" VALGRIND_CALL_NOREDIR_R1 "lgr %0, 2\n\t" \
3341 "aghi 15,216\n\t" VALGRIND_CFI_EPILOGUE \
3343 :
"a"(&_argvec[0])__FRAME_POINTER \
3344 :
"cc", "memory", __CALLER_SAVED_REGS, "6", "7"); \
3345 lval = (__typeof__(lval))_res; \
/* mips32-linux: function-wrapping glue. Saves $gp and $ra around the
 * call, reserves the o32 16-byte outgoing-argument area, loads args
 * into $a0..$a3 and the target address into $t9 (required by the o32
 * PIC calling convention), then does the no-redirect call.
 * NOTE(review): extraction damage as in the rest of this chunk: the
 * "do { ... } while (0)" wrappers, the asm output row (: "=r"(_res))
 * and the final "move %0, $2" result copy appear to be missing; the
 * mixed use of "$29" and "$sp" (same register, two spellings) should be
 * checked against canonical valgrind.h rather than assumed. */
3353#if defined(PLAT_mips32_linux)
/* o32 caller-saved registers: v0/v1, a0-a3, t0-t9, ra. */
3356#define __CALLER_SAVED_REGS \
3357 "$2", "$3", "$4", "$5", "$6", "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", "$25", "$31"
/* CALL_FN_W_v: zero-arg call; target -> $t9. */
3362#define CALL_FN_W_v(lval, orig) \
3364 volatile OrigFn _orig = (orig); \
3365 volatile unsigned long _argvec[1]; \
3366 volatile unsigned long _res; \
3367 _argvec[0] = (unsigned long)_orig.nraddr; \
3368 __asm__ volatile("subu $29, $29, 8 \n\t" \
3369 "sw $gp, 0($sp) \n\t" \
3370 "sw $ra, 4($sp) \n\t" \
3371 "subu $29, $29, 16 \n\t" \
3372 "lw $t9, 0(%1) \n\t"
\
3373 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16\n\t" \
3374 "lw $gp, 0($sp) \n\t" \
3375 "lw $ra, 4($sp) \n\t" \
3376 "addu $29, $29, 8 \n\t" \
3379 :
"0"(&_argvec[0]) \
3380 :
"cc", "memory", __CALLER_SAVED_REGS); \
3381 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_W: one arg -> $a0. */
3384#define CALL_FN_W_W(lval, orig, arg1) \
3386 volatile OrigFn _orig = (orig); \
3387 volatile unsigned long _argvec[2]; \
3388 volatile unsigned long _res; \
3389 _argvec[0] = (unsigned long)_orig.nraddr; \
3390 _argvec[1] = (unsigned long)(arg1); \
3391 __asm__ volatile("subu $29, $29, 8 \n\t" \
3392 "sw $gp, 0($sp) \n\t" \
3393 "sw $ra, 4($sp) \n\t" \
3394 "subu $29, $29, 16 \n\t" \
3395 "lw $a0, 4(%1) \n\t"
\
3396 "lw $t9, 0(%1) \n\t" \
3397 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \
3398 "lw $gp, 0($sp) \n\t" \
3399 "lw $ra, 4($sp) \n\t" \
3400 "addu $29, $29, 8 \n\t" \
3403 :
"0"(&_argvec[0]) \
3404 :
"cc", "memory", __CALLER_SAVED_REGS); \
3405 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_WW: two args -> $a0, $a1. */
3408#define CALL_FN_W_WW(lval, orig, arg1, arg2) \
3410 volatile OrigFn _orig = (orig); \
3411 volatile unsigned long _argvec[3]; \
3412 volatile unsigned long _res; \
3413 _argvec[0] = (unsigned long)_orig.nraddr; \
3414 _argvec[1] = (unsigned long)(arg1); \
3415 _argvec[2] = (unsigned long)(arg2); \
3416 __asm__ volatile("subu $29, $29, 8 \n\t" \
3417 "sw $gp, 0($sp) \n\t" \
3418 "sw $ra, 4($sp) \n\t" \
3419 "subu $29, $29, 16 \n\t" \
3420 "lw $a0, 4(%1) \n\t" \
3421 "lw $a1, 8(%1) \n\t" \
3422 "lw $t9, 0(%1) \n\t"
\
3423 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \
3424 "lw $gp, 0($sp) \n\t" \
3425 "lw $ra, 4($sp) \n\t" \
3426 "addu $29, $29, 8 \n\t" \
3429 :
"0"(&_argvec[0]) \
3430 :
"cc", "memory", __CALLER_SAVED_REGS); \
3431 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_WWW: three args -> $a0..$a2. */
3434#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
3436 volatile OrigFn _orig = (orig); \
3437 volatile unsigned long _argvec[4]; \
3438 volatile unsigned long _res; \
3439 _argvec[0] = (unsigned long)_orig.nraddr; \
3440 _argvec[1] = (unsigned long)(arg1); \
3441 _argvec[2] = (unsigned long)(arg2); \
3442 _argvec[3] = (unsigned long)(arg3); \
3443 __asm__ volatile("subu $29, $29, 8 \n\t" \
3444 "sw $gp, 0($sp) \n\t" \
3445 "sw $ra, 4($sp) \n\t" \
3446 "subu $29, $29, 16 \n\t" \
3447 "lw $a0, 4(%1) \n\t" \
3448 "lw $a1, 8(%1) \n\t" \
3449 "lw $a2, 12(%1) \n\t" \
3450 "lw $t9, 0(%1) \n\t"
\
3451 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \
3452 "lw $gp, 0($sp) \n\t" \
3453 "lw $ra, 4($sp) \n\t" \
3454 "addu $29, $29, 8 \n\t" \
3457 :
"0"(&_argvec[0]) \
3458 :
"cc", "memory", __CALLER_SAVED_REGS); \
3459 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_WWWW: four args -> $a0..$a3 (all o32 register args). */
3462#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
3464 volatile OrigFn _orig = (orig); \
3465 volatile unsigned long _argvec[5]; \
3466 volatile unsigned long _res; \
3467 _argvec[0] = (unsigned long)_orig.nraddr; \
3468 _argvec[1] = (unsigned long)(arg1); \
3469 _argvec[2] = (unsigned long)(arg2); \
3470 _argvec[3] = (unsigned long)(arg3); \
3471 _argvec[4] = (unsigned long)(arg4); \
3472 __asm__ volatile("subu $29, $29, 8 \n\t" \
3473 "sw $gp, 0($sp) \n\t" \
3474 "sw $ra, 4($sp) \n\t" \
3475 "subu $29, $29, 16 \n\t" \
3476 "lw $a0, 4(%1) \n\t" \
3477 "lw $a1, 8(%1) \n\t" \
3478 "lw $a2, 12(%1) \n\t" \
3479 "lw $a3, 16(%1) \n\t" \
3480 "lw $t9, 0(%1) \n\t"
\
3481 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 16 \n\t" \
3482 "lw $gp, 0($sp) \n\t" \
3483 "lw $ra, 4($sp) \n\t" \
3484 "addu $29, $29, 8 \n\t" \
3487 :
"0"(&_argvec[0]) \
3488 :
"cc", "memory", __CALLER_SAVED_REGS); \
3489 lval = (__typeof__(lval))_res; \
/* CALL_FN_W_5W: 24-byte outgoing area; arg5 stored at 16($sp), then
 * args 1-4 loaded into $a0..$a3. */
3492#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
3494 volatile OrigFn _orig = (orig); \
3495 volatile unsigned long _argvec[6]; \
3496 volatile unsigned long _res; \
3497 _argvec[0] = (unsigned long)_orig.nraddr; \
3498 _argvec[1] = (unsigned long)(arg1); \
3499 _argvec[2] = (unsigned long)(arg2); \
3500 _argvec[3] = (unsigned long)(arg3); \
3501 _argvec[4] = (unsigned long)(arg4); \
3502 _argvec[5] = (unsigned long)(arg5); \
3503 __asm__ volatile("subu $29, $29, 8 \n\t" \
3504 "sw $gp, 0($sp) \n\t" \
3505 "sw $ra, 4($sp) \n\t" \
3506 "lw $a0, 20(%1) \n\t" \
3507 "subu $sp, $sp, 24\n\t" \
3508 "sw $a0, 16($sp) \n\t" \
3509 "lw $a0, 4(%1) \n\t" \
3510 "lw $a1, 8(%1) \n\t" \
3511 "lw $a2, 12(%1) \n\t" \
3512 "lw $a3, 16(%1) \n\t" \
3513 "lw $t9, 0(%1) \n\t"
\
3514 VALGRIND_CALL_NOREDIR_T9 "addu $29, $29, 24 \n\t" \
3515 "lw $gp, 0($sp) \n\t" \
3516 "lw $ra, 4($sp) \n\t" \
3517 "addu $sp, $sp, 8 \n\t" \
3520 :
"0"(&_argvec[0]) \
3521 :
"cc", "memory", __CALLER_SAVED_REGS); \
3522 lval = (__typeof__(lval))_res; \
3524#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6) \
3526 volatile OrigFn _orig = (orig); \
3527 volatile unsigned long _argvec[7]; \
3528 volatile unsigned long _res; \
3529 _argvec[0] = (unsigned long)_orig.nraddr; \
3530 _argvec[1] = (unsigned long)(arg1); \
3531 _argvec[2] = (unsigned long)(arg2); \
3532 _argvec[3] = (unsigned long)(arg3); \
3533 _argvec[4] = (unsigned long)(arg4); \
3534 _argvec[5] = (unsigned long)(arg5); \
3535 _argvec[6] = (unsigned long)(arg6); \
3536 __asm__ volatile("subu $29, $29, 8 \n\t" \
3537 "sw $gp, 0($sp) \n\t" \
3538 "sw $ra, 4($sp) \n\t" \
3539 "lw $a0, 20(%1) \n\t" \
3540 "subu $sp, $sp, 32\n\t" \
3541 "sw $a0, 16($sp) \n\t" \
3542 "lw $a0, 24(%1) \n\t" \
3544 "sw $a0, 20($sp) \n\t" \
3545 "lw $a0, 4(%1) \n\t" \
3546 "lw $a1, 8(%1) \n\t" \
3547 "lw $a2, 12(%1) \n\t" \
3548 "lw $a3, 16(%1) \n\t" \
3549 "lw $t9, 0(%1) \n\t"
\
3550 VALGRIND_CALL_NOREDIR_T9 "addu $sp, $sp, 32 \n\t" \
3551 "lw $gp, 0($sp) \n\t" \
3552 "lw $ra, 4($sp) \n\t" \
3553 "addu $sp, $sp, 8 \n\t" \
3556 :
"0"(&_argvec[0]) \
3557 :
"cc", "memory", __CALLER_SAVED_REGS); \
3558 lval = (__typeof__(lval))_res; \
3561#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7) \
3563 volatile OrigFn _orig = (orig); \
3564 volatile unsigned long _argvec[8]; \
3565 volatile unsigned long _res; \
3566 _argvec[0] = (unsigned long)_orig.nraddr; \
3567 _argvec[1] = (unsigned long)(arg1); \
3568 _argvec[2] = (unsigned long)(arg2); \
3569 _argvec[3] = (unsigned long)(arg3); \
3570 _argvec[4] = (unsigned long)(arg4); \
3571 _argvec[5] = (unsigned long)(arg5); \
3572 _argvec[6] = (unsigned long)(arg6); \
3573 _argvec[7] = (unsigned long)(arg7); \
3574 __asm__ volatile("subu $29, $29, 8 \n\t" \
3575 "sw $gp, 0($sp) \n\t" \
3576 "sw $ra, 4($sp) \n\t" \
3577 "lw $a0, 20(%1) \n\t" \
3578 "subu $sp, $sp, 32\n\t" \
3579 "sw $a0, 16($sp) \n\t" \
3580 "lw $a0, 24(%1) \n\t" \
3581 "sw $a0, 20($sp) \n\t" \
3582 "lw $a0, 28(%1) \n\t" \
3583 "sw $a0, 24($sp) \n\t" \
3584 "lw $a0, 4(%1) \n\t" \
3585 "lw $a1, 8(%1) \n\t" \
3586 "lw $a2, 12(%1) \n\t" \
3587 "lw $a3, 16(%1) \n\t" \
3588 "lw $t9, 0(%1) \n\t"
\
3589 VALGRIND_CALL_NOREDIR_T9 "addu $sp, $sp, 32 \n\t" \
3590 "lw $gp, 0($sp) \n\t" \
3591 "lw $ra, 4($sp) \n\t" \
3592 "addu $sp, $sp, 8 \n\t" \
3595 :
"0"(&_argvec[0]) \
3596 :
"cc", "memory", __CALLER_SAVED_REGS); \
3597 lval = (__typeof__(lval))_res; \
3600#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8) \
3602 volatile OrigFn _orig = (orig); \
3603 volatile unsigned long _argvec[9]; \
3604 volatile unsigned long _res; \
3605 _argvec[0] = (unsigned long)_orig.nraddr; \
3606 _argvec[1] = (unsigned long)(arg1); \
3607 _argvec[2] = (unsigned long)(arg2); \
3608 _argvec[3] = (unsigned long)(arg3); \
3609 _argvec[4] = (unsigned long)(arg4); \
3610 _argvec[5] = (unsigned long)(arg5); \
3611 _argvec[6] = (unsigned long)(arg6); \
3612 _argvec[7] = (unsigned long)(arg7); \
3613 _argvec[8] = (unsigned long)(arg8); \
3614 __asm__ volatile("subu $29, $29, 8 \n\t" \
3615 "sw $gp, 0($sp) \n\t" \
3616 "sw $ra, 4($sp) \n\t" \
3617 "lw $a0, 20(%1) \n\t" \
3618 "subu $sp, $sp, 40\n\t" \
3619 "sw $a0, 16($sp) \n\t" \
3620 "lw $a0, 24(%1) \n\t" \
3621 "sw $a0, 20($sp) \n\t" \
3622 "lw $a0, 28(%1) \n\t" \
3623 "sw $a0, 24($sp) \n\t" \
3624 "lw $a0, 32(%1) \n\t" \
3625 "sw $a0, 28($sp) \n\t" \
3626 "lw $a0, 4(%1) \n\t" \
3627 "lw $a1, 8(%1) \n\t" \
3628 "lw $a2, 12(%1) \n\t" \
3629 "lw $a3, 16(%1) \n\t" \
3630 "lw $t9, 0(%1) \n\t"
\
3631 VALGRIND_CALL_NOREDIR_T9 "addu $sp, $sp, 40 \n\t" \
3632 "lw $gp, 0($sp) \n\t" \
3633 "lw $ra, 4($sp) \n\t" \
3634 "addu $sp, $sp, 8 \n\t" \
3637 :
"0"(&_argvec[0]) \
3638 :
"cc", "memory", __CALLER_SAVED_REGS); \
3639 lval = (__typeof__(lval))_res; \
3642#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9) \
3644 volatile OrigFn _orig = (orig); \
3645 volatile unsigned long _argvec[10]; \
3646 volatile unsigned long _res; \
3647 _argvec[0] = (unsigned long)_orig.nraddr; \
3648 _argvec[1] = (unsigned long)(arg1); \
3649 _argvec[2] = (unsigned long)(arg2); \
3650 _argvec[3] = (unsigned long)(arg3); \
3651 _argvec[4] = (unsigned long)(arg4); \
3652 _argvec[5] = (unsigned long)(arg5); \
3653 _argvec[6] = (unsigned long)(arg6); \
3654 _argvec[7] = (unsigned long)(arg7); \
3655 _argvec[8] = (unsigned long)(arg8); \
3656 _argvec[9] = (unsigned long)(arg9); \
3657 __asm__ volatile("subu $29, $29, 8 \n\t" \
3658 "sw $gp, 0($sp) \n\t" \
3659 "sw $ra, 4($sp) \n\t" \
3660 "lw $a0, 20(%1) \n\t" \
3661 "subu $sp, $sp, 40\n\t" \
3662 "sw $a0, 16($sp) \n\t" \
3663 "lw $a0, 24(%1) \n\t" \
3664 "sw $a0, 20($sp) \n\t" \
3665 "lw $a0, 28(%1) \n\t" \
3666 "sw $a0, 24($sp) \n\t" \
3667 "lw $a0, 32(%1) \n\t" \
3668 "sw $a0, 28($sp) \n\t" \
3669 "lw $a0, 36(%1) \n\t" \
3670 "sw $a0, 32($sp) \n\t" \
3671 "lw $a0, 4(%1) \n\t" \
3672 "lw $a1, 8(%1) \n\t" \
3673 "lw $a2, 12(%1) \n\t" \
3674 "lw $a3, 16(%1) \n\t" \
3675 "lw $t9, 0(%1) \n\t"
\
3676 VALGRIND_CALL_NOREDIR_T9 "addu $sp, $sp, 40 \n\t" \
3677 "lw $gp, 0($sp) \n\t" \
3678 "lw $ra, 4($sp) \n\t" \
3679 "addu $sp, $sp, 8 \n\t" \
3682 :
"0"(&_argvec[0]) \
3683 :
"cc", "memory", __CALLER_SAVED_REGS); \
3684 lval = (__typeof__(lval))_res; \
3687#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10) \
3689 volatile OrigFn _orig = (orig); \
3690 volatile unsigned long _argvec[11]; \
3691 volatile unsigned long _res; \
3692 _argvec[0] = (unsigned long)_orig.nraddr; \
3693 _argvec[1] = (unsigned long)(arg1); \
3694 _argvec[2] = (unsigned long)(arg2); \
3695 _argvec[3] = (unsigned long)(arg3); \
3696 _argvec[4] = (unsigned long)(arg4); \
3697 _argvec[5] = (unsigned long)(arg5); \
3698 _argvec[6] = (unsigned long)(arg6); \
3699 _argvec[7] = (unsigned long)(arg7); \
3700 _argvec[8] = (unsigned long)(arg8); \
3701 _argvec[9] = (unsigned long)(arg9); \
3702 _argvec[10] = (unsigned long)(arg10); \
3703 __asm__ volatile("subu $29, $29, 8 \n\t" \
3704 "sw $gp, 0($sp) \n\t" \
3705 "sw $ra, 4($sp) \n\t" \
3706 "lw $a0, 20(%1) \n\t" \
3707 "subu $sp, $sp, 48\n\t" \
3708 "sw $a0, 16($sp) \n\t" \
3709 "lw $a0, 24(%1) \n\t" \
3710 "sw $a0, 20($sp) \n\t" \
3711 "lw $a0, 28(%1) \n\t" \
3712 "sw $a0, 24($sp) \n\t" \
3713 "lw $a0, 32(%1) \n\t" \
3714 "sw $a0, 28($sp) \n\t" \
3715 "lw $a0, 36(%1) \n\t" \
3716 "sw $a0, 32($sp) \n\t" \
3717 "lw $a0, 40(%1) \n\t" \
3718 "sw $a0, 36($sp) \n\t" \
3719 "lw $a0, 4(%1) \n\t" \
3720 "lw $a1, 8(%1) \n\t" \
3721 "lw $a2, 12(%1) \n\t" \
3722 "lw $a3, 16(%1) \n\t" \
3723 "lw $t9, 0(%1) \n\t"
\
3724 VALGRIND_CALL_NOREDIR_T9 "addu $sp, $sp, 48 \n\t" \
3725 "lw $gp, 0($sp) \n\t" \
3726 "lw $ra, 4($sp) \n\t" \
3727 "addu $sp, $sp, 8 \n\t" \
3730 :
"0"(&_argvec[0]) \
3731 :
"cc", "memory", __CALLER_SAVED_REGS); \
3732 lval = (__typeof__(lval))_res; \
3735#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11) \
3737 volatile OrigFn _orig = (orig); \
3738 volatile unsigned long _argvec[12]; \
3739 volatile unsigned long _res; \
3740 _argvec[0] = (unsigned long)_orig.nraddr; \
3741 _argvec[1] = (unsigned long)(arg1); \
3742 _argvec[2] = (unsigned long)(arg2); \
3743 _argvec[3] = (unsigned long)(arg3); \
3744 _argvec[4] = (unsigned long)(arg4); \
3745 _argvec[5] = (unsigned long)(arg5); \
3746 _argvec[6] = (unsigned long)(arg6); \
3747 _argvec[7] = (unsigned long)(arg7); \
3748 _argvec[8] = (unsigned long)(arg8); \
3749 _argvec[9] = (unsigned long)(arg9); \
3750 _argvec[10] = (unsigned long)(arg10); \
3751 _argvec[11] = (unsigned long)(arg11); \
3752 __asm__ volatile("subu $29, $29, 8 \n\t" \
3753 "sw $gp, 0($sp) \n\t" \
3754 "sw $ra, 4($sp) \n\t" \
3755 "lw $a0, 20(%1) \n\t" \
3756 "subu $sp, $sp, 48\n\t" \
3757 "sw $a0, 16($sp) \n\t" \
3758 "lw $a0, 24(%1) \n\t" \
3759 "sw $a0, 20($sp) \n\t" \
3760 "lw $a0, 28(%1) \n\t" \
3761 "sw $a0, 24($sp) \n\t" \
3762 "lw $a0, 32(%1) \n\t" \
3763 "sw $a0, 28($sp) \n\t" \
3764 "lw $a0, 36(%1) \n\t" \
3765 "sw $a0, 32($sp) \n\t" \
3766 "lw $a0, 40(%1) \n\t" \
3767 "sw $a0, 36($sp) \n\t" \
3768 "lw $a0, 44(%1) \n\t" \
3769 "sw $a0, 40($sp) \n\t" \
3770 "lw $a0, 4(%1) \n\t" \
3771 "lw $a1, 8(%1) \n\t" \
3772 "lw $a2, 12(%1) \n\t" \
3773 "lw $a3, 16(%1) \n\t" \
3774 "lw $t9, 0(%1) \n\t"
\
3775 VALGRIND_CALL_NOREDIR_T9 "addu $sp, $sp, 48 \n\t" \
3776 "lw $gp, 0($sp) \n\t" \
3777 "lw $ra, 4($sp) \n\t" \
3778 "addu $sp, $sp, 8 \n\t" \
3781 :
"0"(&_argvec[0]) \
3782 :
"cc", "memory", __CALLER_SAVED_REGS); \
3783 lval = (__typeof__(lval))_res; \
3786#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11, arg12) \
3788 volatile OrigFn _orig = (orig); \
3789 volatile unsigned long _argvec[13]; \
3790 volatile unsigned long _res; \
3791 _argvec[0] = (unsigned long)_orig.nraddr; \
3792 _argvec[1] = (unsigned long)(arg1); \
3793 _argvec[2] = (unsigned long)(arg2); \
3794 _argvec[3] = (unsigned long)(arg3); \
3795 _argvec[4] = (unsigned long)(arg4); \
3796 _argvec[5] = (unsigned long)(arg5); \
3797 _argvec[6] = (unsigned long)(arg6); \
3798 _argvec[7] = (unsigned long)(arg7); \
3799 _argvec[8] = (unsigned long)(arg8); \
3800 _argvec[9] = (unsigned long)(arg9); \
3801 _argvec[10] = (unsigned long)(arg10); \
3802 _argvec[11] = (unsigned long)(arg11); \
3803 _argvec[12] = (unsigned long)(arg12); \
3804 __asm__ volatile("subu $29, $29, 8 \n\t" \
3805 "sw $gp, 0($sp) \n\t" \
3806 "sw $ra, 4($sp) \n\t" \
3807 "lw $a0, 20(%1) \n\t" \
3808 "subu $sp, $sp, 56\n\t" \
3809 "sw $a0, 16($sp) \n\t" \
3810 "lw $a0, 24(%1) \n\t" \
3811 "sw $a0, 20($sp) \n\t" \
3812 "lw $a0, 28(%1) \n\t" \
3813 "sw $a0, 24($sp) \n\t" \
3814 "lw $a0, 32(%1) \n\t" \
3815 "sw $a0, 28($sp) \n\t" \
3816 "lw $a0, 36(%1) \n\t" \
3817 "sw $a0, 32($sp) \n\t" \
3818 "lw $a0, 40(%1) \n\t" \
3819 "sw $a0, 36($sp) \n\t" \
3820 "lw $a0, 44(%1) \n\t" \
3821 "sw $a0, 40($sp) \n\t" \
3822 "lw $a0, 48(%1) \n\t" \
3823 "sw $a0, 44($sp) \n\t" \
3824 "lw $a0, 4(%1) \n\t" \
3825 "lw $a1, 8(%1) \n\t" \
3826 "lw $a2, 12(%1) \n\t" \
3827 "lw $a3, 16(%1) \n\t" \
3828 "lw $t9, 0(%1) \n\t"
\
3829 VALGRIND_CALL_NOREDIR_T9 "addu $sp, $sp, 56 \n\t" \
3830 "lw $gp, 0($sp) \n\t" \
3831 "lw $ra, 4($sp) \n\t" \
3832 "addu $sp, $sp, 8 \n\t" \
3835 :
"0"(&_argvec[0]) \
3836 :
"cc", "memory", __CALLER_SAVED_REGS); \
3837 lval = (__typeof__(lval))_res; \
3858#define VG_USERREQ_TOOL_BASE(a, b) ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
3859#define VG_IS_TOOL_USERREQ(a, b, v) (VG_USERREQ_TOOL_BASE(a, b) == ((v)&0xffff0000))
3938#if !defined(__GNUC__)
3939#define __extension__
3947#define RUNNING_ON_VALGRIND \
3948 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , VG_USERREQ__RUNNING_ON_VALGRIND, 0, 0, 0, 0, 0)
3955#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr, _qzz_len) \
3956 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, _qzz_addr, _qzz_len, 0, 0, 0)
3964#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
3967static int VALGRIND_PRINTF(
const char *format, ...) __attribute__((format(__printf__, 1, 2), __unused__));
3970#if defined(_MSC_VER)
3975#if defined(NVALGRIND)
3978#if defined(_MSC_VER)
3981 unsigned long _qzz_res;
3984 va_start(vargs, format);
3985#if defined(_MSC_VER)
3987 (uintptr_t)&vargs, 0, 0, 0);
3990 (
unsigned long)&vargs, 0, 0, 0);
3993 return (
int)_qzz_res;
3997#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
4001#if defined(_MSC_VER)
4006#if defined(NVALGRIND)
4009#if defined(_MSC_VER)
4012 unsigned long _qzz_res;
4015 va_start(vargs, format);
4016#if defined(_MSC_VER)
4018 (uintptr_t)&vargs, 0, 0, 0);
4021 (
unsigned long)&vargs, 0, 0, 0);
4024 return (
int)_qzz_res;
4052#define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
4053 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , VG_USERREQ__CLIENT_CALL0, _qyy_fn, 0, 0, 0, 0)
4055#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
4056 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , VG_USERREQ__CLIENT_CALL1, _qyy_fn, _qyy_arg1, 0, 0, 0)
4058#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
4059 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , VG_USERREQ__CLIENT_CALL2, _qyy_fn, _qyy_arg1, _qyy_arg2, \
4062#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
4063 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , VG_USERREQ__CLIENT_CALL3, _qyy_fn, _qyy_arg1, _qyy_arg2, \
4070#define VALGRIND_COUNT_ERRORS \
4071 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 , VG_USERREQ__COUNT_ERRORS, 0, 0, 0, 0, 0)
4174#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \
4175 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, addr, sizeB, rzB, is_zeroed, 0)
4180#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
4181 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, addr, oldSizeB, newSizeB, rzB, 0)
4186#define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
4187 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK, addr, rzB, 0, 0, 0)
4190#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
4191 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, pool, rzB, is_zeroed, 0, 0)
4194#define VALGRIND_DESTROY_MEMPOOL(pool) VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL, pool, 0, 0, 0, 0)
4197#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
4198 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC, pool, addr, size, 0, 0)
4201#define VALGRIND_MEMPOOL_FREE(pool, addr) VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE, pool, addr, 0, 0, 0)
4204#define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
4205 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM, pool, addr, size, 0, 0)
4208#define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \
4209 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL, poolA, poolB, 0, 0, 0)
4212#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
4213 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE, pool, addrA, addrB, size, 0)
4216#define VALGRIND_MEMPOOL_EXISTS(pool) \
4217 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__MEMPOOL_EXISTS, pool, 0, 0, 0, 0)
4220#define VALGRIND_STACK_REGISTER(start, end) \
4221 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__STACK_REGISTER, start, end, 0, 0, 0)
4225#define VALGRIND_STACK_DEREGISTER(id) VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, id, 0, 0, 0, 0)
4228#define VALGRIND_STACK_CHANGE(id, start, end) \
4229 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE, id, start, end, 0, 0)
4232#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \
4233 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, fd, ptr, total_size, delta, 0)
4239#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \
4240 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__MAP_IP_TO_SRCLOC, addr, buf64, 0, 0, 0)
4250#define VALGRIND_DISABLE_ERROR_REPORTING \
4251 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, 1, 0, 0, 0, 0)
4255#define VALGRIND_ENABLE_ERROR_REPORTING \
4256 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, -1, 0, 0, 0, 0)
4258#undef PLAT_x86_darwin
4259#undef PLAT_amd64_darwin
4260#undef PLAT_x86_win32
4261#undef PLAT_x86_linux
4262#undef PLAT_amd64_linux
4263#undef PLAT_ppc32_linux
4264#undef PLAT_ppc64_linux
4265#undef PLAT_arm_linux
4266#undef PLAT_s390x_linux
4267#undef PLAT_mips32_linux
static int VALGRIND_PRINTF_BACKTRACE(const char *format,...)
Definition: valgrind.h:4004
Vg_ClientRequest
Definition: valgrind.h:3865
@ VG_USERREQ__DESTROY_MEMPOOL
Definition: valgrind.h:3895
@ VG_USERREQ__MAP_IP_TO_SRCLOC
Definition: valgrind.h:3925
@ VG_USERREQ__LOAD_PDB_DEBUGINFO
Definition: valgrind.h:3922
@ VG_USERREQ__VEX_INIT_FOR_IRI
Definition: valgrind.h:3935
@ VG_USERREQ__PRINTF_BACKTRACE
Definition: valgrind.h:3911
@ VG_USERREQ__GDB_MONITOR_COMMAND
Definition: valgrind.h:3886
@ VG_USERREQ__MEMPOOL_ALLOC
Definition: valgrind.h:3896
@ VG_USERREQ__RESIZEINPLACE_BLOCK
Definition: valgrind.h:3891
@ VG_USERREQ__MALLOCLIKE_BLOCK
Definition: valgrind.h:3890
@ VG_USERREQ__COUNT_ERRORS
Definition: valgrind.h:3882
@ VG_USERREQ__STACK_REGISTER
Definition: valgrind.h:3917
@ VG_USERREQ__MEMPOOL_CHANGE
Definition: valgrind.h:3900
@ VG_USERREQ__PRINTF_VALIST_BY_REF
Definition: valgrind.h:3913
@ VG_USERREQ__RUNNING_ON_VALGRIND
Definition: valgrind.h:3866
@ VG_USERREQ__CLIENT_CALL0
Definition: valgrind.h:3874
@ VG_USERREQ__DISCARD_TRANSLATIONS
Definition: valgrind.h:3867
@ VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF
Definition: valgrind.h:3914
@ VG_USERREQ__FREELIKE_BLOCK
Definition: valgrind.h:3892
@ VG_USERREQ__CREATE_MEMPOOL
Definition: valgrind.h:3894
@ VG_USERREQ__MOVE_MEMPOOL
Definition: valgrind.h:3899
@ VG_USERREQ__CLIENT_CALL3
Definition: valgrind.h:3877
@ VG_USERREQ__MEMPOOL_TRIM
Definition: valgrind.h:3898
@ VG_USERREQ__CLIENT_CALL2
Definition: valgrind.h:3876
@ VG_USERREQ__PRINTF
Definition: valgrind.h:3910
@ VG_USERREQ__CHANGE_ERR_DISABLEMENT
Definition: valgrind.h:3932
@ VG_USERREQ__STACK_CHANGE
Definition: valgrind.h:3919
@ VG_USERREQ__STACK_DEREGISTER
Definition: valgrind.h:3918
@ VG_USERREQ__MEMPOOL_FREE
Definition: valgrind.h:3897
@ VG_USERREQ__MEMPOOL_EXISTS
Definition: valgrind.h:3901
@ VG_USERREQ__CLIENT_CALL1
Definition: valgrind.h:3875
static int VALGRIND_PRINTF(const char *format,...)
Definition: valgrind.h:3973
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(_zzq_default, _zzq_request, _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)
Definition: valgrind.h:188