| Message ID | 1475227335-17098-4-git-send-email-martin@martin.st |
|---|---|
| State | Superseded |
On Friday, 30 September 2016 11:22:15 CEST, Martin Storsjö wrote:

> This, combined with clobbering the stack space prior to the call,
> increases the chances of finding cases where 32 bit parameters
> are erroneously treated as 64 bit.
> ---
> With the current aarch64 asm and checkasm tests, this found
> one bug.
> ---
>  tests/checkasm/aarch64/checkasm.S | 16 ++++++----------
>  tests/checkasm/checkasm.h         |  5 +++--
>  2 files changed, 9 insertions(+), 12 deletions(-)

patch ok

Janne
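For context on the bug class this targets, here is a minimal, hypothetical C sketch (not part of the patch or of checkasm itself). `sum_buggy` models assembly that consumes a 32-bit length parameter as the full 64-bit register; that only misbehaves once the upper half of the incoming 64-bit slot contains garbage rather than zeroes, which is exactly the situation the clobbered stack plus the dummy arguments set up.

```c
#include <stdint.h>
#include <stdio.h>

/* Model of what the checked call arranges on AArch64: a 32-bit parameter
 * arrives in a 64-bit slot whose upper half is unspecified (the AArch64
 * calling convention does not require the caller to zero it), here
 * simulated with a 0xdeadbeef pattern. */
static uint64_t poison_upper_bits(int value)
{
    return 0xdeadbeef00000000ULL | (uint32_t)value;
}

/* Buggy: treats the 32-bit 'len' parameter as all 64 bits of the slot. */
int64_t sum_buggy(const int32_t *buf, uint64_t len_slot)
{
    int64_t s = 0;
    for (uint64_t i = 0; i < len_slot; i++)
        s += buf[i];
    return s;
}

/* Correct: only the low 32 bits of the slot are the actual parameter. */
int64_t sum_correct(const int32_t *buf, uint64_t len_slot)
{
    int64_t s = 0;
    for (uint32_t i = 0; i < (uint32_t)len_slot; i++)
        s += buf[i];
    return s;
}

int main(void)
{
    int32_t buf[4] = { 1, 2, 3, 4 };
    uint64_t len_slot = poison_upper_bits(4);

    printf("correct: %lld\n", (long long)sum_correct(buf, len_slot));
    /* sum_buggy(buf, len_slot) would try to read roughly
     * 0xdeadbeef00000004 elements and walk far off the end of buf. */
    return 0;
}
```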
```diff
diff --git a/tests/checkasm/aarch64/checkasm.S b/tests/checkasm/aarch64/checkasm.S
index a123ac1..d8dcaba 100644
--- a/tests/checkasm/aarch64/checkasm.S
+++ b/tests/checkasm/aarch64/checkasm.S
@@ -94,22 +94,18 @@ function checkasm_checked_call, export=1
 
         sub         sp,  sp,  #ARG_STACK
 .equ pos, 0
-// the first stacked arg is copied to x7
 .rept MAX_ARGS-8
-        ldr         x9, [x29, #16 + 8 + pos]
+        // Skip the first 8 args, that are loaded into registers
+        ldr         x9, [x29, #16 + 8*8 + pos]
         str         x9, [sp, #pos]
 .equ pos, pos + 8
 .endr
 
         mov         x12, x0
-        mov         x0,  x1
-        mov         x1,  x2
-        mov         x2,  x3
-        mov         x3,  x4
-        mov         x4,  x5
-        mov         x5,  x6
-        mov         x6,  x7
-        ldr         x7,  [x29, #16]
+        ldp         x0,  x1,  [x29, #16]
+        ldp         x2,  x3,  [x29, #32]
+        ldp         x4,  x5,  [x29, #48]
+        ldp         x6,  x7,  [x29, #64]
         blr         x12
         add         sp,  sp,  #ARG_STACK
         stp         x0,  x1,  [sp, #-16]!
diff --git a/tests/checkasm/checkasm.h b/tests/checkasm/checkasm.h
index 8d54cc7..82f95aa 100644
--- a/tests/checkasm/checkasm.h
+++ b/tests/checkasm/checkasm.h
@@ -135,11 +135,12 @@ extern void (*checkasm_checked_call)(void *func, int dummy, ...);
 #elif ARCH_AARCH64 && !defined(__APPLE__)
 void checkasm_stack_clobber(uint64_t clobber, ...);
 void checkasm_checked_call(void *func, ...);
-#define declare_new(ret, ...) ret (*checked_call)(void *, __VA_ARGS__) = (void *)checkasm_checked_call;
+#define declare_new(ret, ...) ret (*checked_call)(void *, int, int, int, int, int, int, int, __VA_ARGS__)\
+                              = (void *)checkasm_checked_call;
 #define CLOB (UINT64_C(0xdeadbeefdeadbeef))
 #define call_new(...) (checkasm_stack_clobber(CLOB,CLOB,CLOB,CLOB,CLOB,CLOB,CLOB,CLOB,CLOB,CLOB,CLOB,\
                                               CLOB,CLOB,CLOB,CLOB,CLOB,CLOB,CLOB,CLOB,CLOB,CLOB),\
-                      checked_call(func_new, __VA_ARGS__))
+                      checked_call(func_new, 0, 0, 0, 0, 0, 0, 0, __VA_ARGS__))
 #else
 #define declare_new(ret, ...)
 #define declare_new_emms(cpu_flags, ret, ...)
```
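A hedged sketch of how the modified macros compose, with illustrative names (`fake_checked_call`, `dummy_func`) and an example signature that are not taken from checkasm: on AArch64, `func_new` plus the seven zero dummies occupy x0-x7, so every real argument of the tested function is passed on the stack, which `call_new()` has just filled with 0xdeadbeefdeadbeef. The assembly wrapper then reloads the real arguments from those 64-bit stack slots into x0-x7 with `ldp` before branching to the function under test, so a 32-bit parameter arrives with garbage in its upper half.

```c
#include <stdint.h>
#include <stdio.h>

/* Stand-in for the function under test (what checkasm calls func_new). */
static void dummy_func(uint8_t *dst, int stride)
{
    dst[0] = (uint8_t)stride;
}

/* C model of the checkasm_checked_call assembly wrapper for this one
 * signature: it discards the seven dummies and calls func with the real
 * arguments. The real wrapper is variadic assembly, which is why the real
 * declare_new() casts it through (void *). */
static void fake_checked_call(void (*func)(uint8_t *, int),
                              int d0, int d1, int d2, int d3,
                              int d4, int d5, int d6,
                              uint8_t *dst, int stride)
{
    (void)d0; (void)d1; (void)d2; (void)d3; (void)d4; (void)d5; (void)d6;
    func(dst, stride);
}

int main(void)
{
    uint8_t buf[16] = { 0 };

    /* Roughly what declare_new(void, uint8_t *, int) now declares ...    */
    void (*checked_call)(void (*)(uint8_t *, int),
                         int, int, int, int, int, int, int,
                         uint8_t *, int) = fake_checked_call;

    /* ... and roughly the call that call_new(buf, 16) now produces.      */
    checked_call(dummy_func, 0, 0, 0, 0, 0, 0, 0, buf, 16);

    printf("dst[0] = %d\n", buf[0]);
    return 0;
}
```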