tcg/mips: fix broken CONFIG_TCG_PASS_AREG0 code

The CONFIG_TCG_PASS_AREG0 code for calling ld/st helpers was
broken in that it did not respect the ABI requirement that 64-bit
values be passed in even-odd register pairs. The simplest
way to fix this is to implement some new utility functions
for marshalling function arguments into the correct registers
and stack, so that the code which sets up the address and
data arguments does not need to care whether there has been
a preceding env argument.
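
As a minimal standalone sketch (not part of the patch; the helper names
put32/put64 below are illustrative only), the o32 rule being respected here
can be modelled in plain C: the first four 32-bit argument slots map to
$a0..$a3, later slots live on the stack at 4*slot($sp), and a 64-bit value
must start in an even-numbered slot. Running the model shows why a leading
env argument pushes a 64-bit address from a0:a1 to a2:a3 and spills the
following argument:

/* Standalone illustration only -- not QEMU code. */
#include <stdio.h>

static const char *const regs[4] = { "a0", "a1", "a2", "a3" };

static void put32(int *arg_num, const char *what)
{
    if (*arg_num < 4) {
        printf("%-10s -> $%s\n", what, regs[*arg_num]);
    } else {
        printf("%-10s -> %d($sp)\n", what, 4 * *arg_num);
    }
    (*arg_num)++;
}

static void put64(int *arg_num, const char *lo, const char *hi)
{
    *arg_num = (*arg_num + 1) & ~1;   /* round up to an even slot */
    put32(arg_num, lo);               /* low word first (little-endian host) */
    put32(arg_num, hi);
}

int main(void)
{
    int arg_num = 0;
    put32(&arg_num, "env");                  /* CONFIG_TCG_PASS_AREG0 */
    put64(&arg_num, "addr_lo", "addr_hi");   /* 64-bit guest address  */
    put32(&arg_num, "mem_index");
    return 0;
}

This prints env -> $a0, addr_lo -> $a2, addr_hi -> $a3, mem_index -> 16($sp).
Without the even-slot rounding the address would straddle $a1:$a2, which is
not a valid o32 pair; the tcg_out_call_iarg_reg64() helper introduced below
performs exactly this rounding.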

Based on commit 9716ef3b for ARM by Peter Maydell.

Signed-off-by: Aurelien Jarno <aurelien@aurel32.net>
Reviewed-by: Peter Maydell <peter.maydell@linaro.org>
commit 18fec301cd
parent ce67604048
Author: Aurelien Jarno <aurelien@aurel32.net>
Date:   2012-08-27 22:13:27 +02:00

@@ -217,6 +217,9 @@ static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
tcg_regset_set(ct->u.regs, 0xffffffff);
#if defined(CONFIG_SOFTMMU)
tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
# if defined(CONFIG_TCG_PASS_AREG0) && (TARGET_LONG_BITS == 64)
tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
# endif
#endif
break;
case 'S': /* qemu_st constraint */
@@ -224,10 +227,14 @@ static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
tcg_regset_set(ct->u.regs, 0xffffffff);
tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
#if defined(CONFIG_SOFTMMU)
# if TARGET_LONG_BITS == 64
# if (defined(CONFIG_TCG_PASS_AREG0) && TARGET_LONG_BITS == 32) || \
(!defined(CONFIG_TCG_PASS_AREG0) && TARGET_LONG_BITS == 64)
tcg_regset_reset_reg(ct->u.regs, TCG_REG_A1);
# endif
tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
# if defined(CONFIG_TCG_PASS_AREG0) && TARGET_LONG_BITS == 64
tcg_regset_reset_reg(ct->u.regs, TCG_REG_A3);
# endif
#endif
break;
case 'I':
@@ -382,7 +389,10 @@ static inline void tcg_out_nop(TCGContext *s)
static inline void tcg_out_mov(TCGContext *s, TCGType type,
TCGReg ret, TCGReg arg)
{
tcg_out_opc_reg(s, OPC_ADDU, ret, arg, TCG_REG_ZERO);
/* Simple reg-reg move, optimising out the 'do nothing' case */
if (ret != arg) {
tcg_out_opc_reg(s, OPC_ADDU, ret, arg, TCG_REG_ZERO);
}
}
static inline void tcg_out_movi(TCGContext *s, TCGType type,
@@ -503,6 +513,67 @@ static inline void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
}
}
/* Helper routines for marshalling helper function arguments into
* the correct registers and stack.
* arg_num is where we want to put this argument, and is updated to be ready
* for the next call. arg is the argument itself. Note that arg_num 0..3 is
* real registers, 4+ on stack.
*
* We provide routines for arguments which are: immediate, 32 bit
* value in register, 16 and 8 bit values in register (which must be zero
* extended before use) and 64 bit value in a lo:hi register pair.
*/
#define DEFINE_TCG_OUT_CALL_IARG(NAME, ARGPARAM) \
static inline void NAME(TCGContext *s, int *arg_num, ARGPARAM) \
{ \
if (*arg_num < 4) { \
DEFINE_TCG_OUT_CALL_IARG_GET_ARG(tcg_target_call_iarg_regs[*arg_num]); \
} else { \
DEFINE_TCG_OUT_CALL_IARG_GET_ARG(TCG_REG_AT); \
tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 4 * (*arg_num)); \
} \
(*arg_num)++; \
}
#define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xff);
DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg8, TCGReg arg)
#undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
#define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xffff);
DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg16, TCGReg arg)
#undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
#define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
tcg_out_movi(s, TCG_TYPE_I32, A, arg);
DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_imm32, uint32_t arg)
#undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
/* We don't use the macro for this one to avoid an unnecessary reg-reg
move when storing to the stack. */
static inline void tcg_out_call_iarg_reg32(TCGContext *s, int *arg_num,
TCGReg arg)
{
if (*arg_num < 4) {
tcg_out_mov(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[*arg_num], arg);
} else {
tcg_out_st(s, TCG_TYPE_I32, arg, TCG_REG_SP, 4 * (*arg_num));
}
(*arg_num)++;
}
static inline void tcg_out_call_iarg_reg64(TCGContext *s, int *arg_num,
TCGReg arg_low, TCGReg arg_high)
{
(*arg_num) = (*arg_num + 1) & ~1;
#if defined(TCG_TARGET_WORDS_BIGENDIAN)
tcg_out_call_iarg_reg32(s, arg_num, arg_high);
tcg_out_call_iarg_reg32(s, arg_num, arg_low);
#else
tcg_out_call_iarg_reg32(s, arg_num, arg_low);
tcg_out_call_iarg_reg32(s, arg_num, arg_high);
#endif
}
static void tcg_out_brcond(TCGContext *s, TCGCond cond, int arg1,
int arg2, int label_index)
{
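
To make the macro machinery above easier to read, here is (up to a redundant
semicolon) what the tcg_out_call_iarg_reg8 instance effectively expands to;
the reg16 and imm32 instances differ only in the GET_ARG operation (and the
imm32 one takes a uint32_t). This expansion is shown for illustration and is
not an extra hunk of the patch:

/* Effective expansion of tcg_out_call_iarg_reg8 (illustration only). */
static inline void tcg_out_call_iarg_reg8(TCGContext *s, int *arg_num, TCGReg arg)
{
    if (*arg_num < 4) {
        /* argument slots 0..3: zero-extend straight into $a0..$a3 */
        tcg_out_opc_imm(s, OPC_ANDI, tcg_target_call_iarg_regs[*arg_num], arg, 0xff);
    } else {
        /* slots 4+: zero-extend into $at, then spill to the o32 stack area */
        tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, arg, 0xff);
        tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 4 * (*arg_num));
    }
    (*arg_num)++;
}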
@@ -792,18 +863,18 @@ static void *qemu_st_helpers[4] = {
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
int opc)
{
int addr_regl, addr_reg1, addr_meml;
int addr_regl, addr_meml;
int data_regl, data_regh, data_reg1, data_reg2;
int mem_index, s_bits;
#if defined(CONFIG_SOFTMMU)
void *label1_ptr, *label2_ptr;
int sp_args;
int arg_num;
#endif
#if TARGET_LONG_BITS == 64
# if defined(CONFIG_SOFTMMU)
uint8_t *label3_ptr;
# endif
int addr_regh, addr_reg2, addr_memh;
int addr_regh, addr_memh;
#endif
data_regl = *args++;
if (opc == 3)
@@ -831,18 +902,13 @@ static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
}
#if TARGET_LONG_BITS == 64
# if defined(TCG_TARGET_WORDS_BIGENDIAN)
addr_reg1 = addr_regh;
addr_reg2 = addr_regl;
addr_memh = 0;
addr_meml = 4;
# else
addr_reg1 = addr_regl;
addr_reg2 = addr_regh;
addr_memh = 4;
addr_meml = 0;
# endif
#else
addr_reg1 = addr_regl;
addr_meml = 0;
#endif
@@ -875,22 +941,17 @@ static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
# endif
/* slow path */
sp_args = TCG_REG_A0;
tcg_out_mov(s, TCG_TYPE_I32, sp_args++, addr_reg1);
# if TARGET_LONG_BITS == 64
tcg_out_mov(s, TCG_TYPE_I32, sp_args++, addr_reg2);
arg_num = 0;
# ifdef CONFIG_TCG_PASS_AREG0
tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
# endif
tcg_out_movi(s, TCG_TYPE_I32, sp_args++, mem_index);
# if TARGET_LONG_BITS == 64
tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
# else
tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
# endif
tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_ld_helpers[s_bits]);
#ifdef CONFIG_TCG_PASS_AREG0
/* XXX/FIXME: suboptimal and incorrect for 64 on 32 bit */
tcg_out_mov(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[2],
tcg_target_call_iarg_regs[1]);
tcg_out_mov(s, TCG_TYPE_TL, tcg_target_call_iarg_regs[1],
tcg_target_call_iarg_regs[0]);
tcg_out_mov(s, TCG_TYPE_PTR, tcg_target_call_iarg_regs[0],
TCG_AREG0);
#endif
tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
tcg_out_nop(s);
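
A short annotation of the hunk above (not an additional hunk of the patch):

/*
 * Annotation only.  With CONFIG_TCG_PASS_AREG0 and TARGET_LONG_BITS == 64,
 * the new slow-path sequence produces the o32 layout
 *
 *     env            -> $a0
 *     guest address  -> $a2:$a3   (even-odd pair, $a1 left unused)
 *     mem_index      -> 16($sp)
 *
 * whereas the removed register shuffle left the address split across
 * $a1:$a2 and overwrote mem_index before it could be passed on, which is
 * what the deleted XXX/FIXME comment was alluding to.
 */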
@@ -991,18 +1052,18 @@ static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
int opc)
{
int addr_regl, addr_reg1, addr_meml;
int addr_regl, addr_meml;
int data_regl, data_regh, data_reg1, data_reg2;
int mem_index, s_bits;
#if defined(CONFIG_SOFTMMU)
uint8_t *label1_ptr, *label2_ptr;
int sp_args;
int arg_num;
#endif
#if TARGET_LONG_BITS == 64
# if defined(CONFIG_SOFTMMU)
uint8_t *label3_ptr;
# endif
int addr_regh, addr_reg2, addr_memh;
int addr_regh, addr_memh;
#endif
data_regl = *args++;
@@ -1024,18 +1085,13 @@ static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
#if TARGET_LONG_BITS == 64
addr_regh = *args++;
# if defined(TCG_TARGET_WORDS_BIGENDIAN)
addr_reg1 = addr_regh;
addr_reg2 = addr_regl;
addr_memh = 0;
addr_meml = 4;
# else
addr_reg1 = addr_regl;
addr_reg2 = addr_regh;
addr_memh = 4;
addr_meml = 0;
# endif
#else
addr_reg1 = addr_regl;
addr_meml = 0;
#endif
mem_index = *args;
@@ -1070,49 +1126,33 @@ static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
# endif
/* slow path */
sp_args = TCG_REG_A0;
tcg_out_mov(s, TCG_TYPE_I32, sp_args++, addr_reg1);
arg_num = 0;
# ifdef CONFIG_TCG_PASS_AREG0
tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
# endif
# if TARGET_LONG_BITS == 64
tcg_out_mov(s, TCG_TYPE_I32, sp_args++, addr_reg2);
tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
# else
tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
# endif
switch(opc) {
case 0:
tcg_out_opc_imm(s, OPC_ANDI, sp_args++, data_reg1, 0xff);
tcg_out_call_iarg_reg8(s, &arg_num, data_regl);
break;
case 1:
tcg_out_opc_imm(s, OPC_ANDI, sp_args++, data_reg1, 0xffff);
tcg_out_call_iarg_reg16(s, &arg_num, data_regl);
break;
case 2:
tcg_out_mov(s, TCG_TYPE_I32, sp_args++, data_reg1);
tcg_out_call_iarg_reg32(s, &arg_num, data_regl);
break;
case 3:
sp_args = (sp_args + 1) & ~1;
tcg_out_mov(s, TCG_TYPE_I32, sp_args++, data_reg1);
tcg_out_mov(s, TCG_TYPE_I32, sp_args++, data_reg2);
tcg_out_call_iarg_reg64(s, &arg_num, data_regl, data_regh);
break;
default:
tcg_abort();
}
if (sp_args > TCG_REG_A3) {
/* Push mem_index on the stack */
tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_AT, mem_index);
tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 16);
} else {
tcg_out_movi(s, TCG_TYPE_I32, sp_args, mem_index);
}
tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_st_helpers[s_bits]);
#ifdef CONFIG_TCG_PASS_AREG0
/* XXX/FIXME: suboptimal and incorrect for 64 on 32 bit */
tcg_out_mov(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[3],
tcg_target_call_iarg_regs[2]);
tcg_out_mov(s, TCG_TYPE_I64, tcg_target_call_iarg_regs[2],
tcg_target_call_iarg_regs[1]);
tcg_out_mov(s, TCG_TYPE_TL, tcg_target_call_iarg_regs[1],
tcg_target_call_iarg_regs[0]);
tcg_out_mov(s, TCG_TYPE_PTR, tcg_target_call_iarg_regs[0],
TCG_AREG0);
#endif
tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
tcg_out_nop(s);
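
For completeness (again not part of the patch), the store slow path with
CONFIG_TCG_PASS_AREG0, a 64-bit guest address and 64-bit data should now
marshal as follows; the removed 'sp_args > TCG_REG_A3' special case that
stored mem_index at 16($sp) by hand is subsumed by tcg_out_call_iarg_imm32(),
which derives the stack slot from arg_num:

/*
 * Annotation only: expected o32 placement for the opc == 3 store case.
 *
 *     env            -> $a0
 *     guest address  -> $a2:$a3
 *     data (64 bit)  -> 16($sp) and 20($sp)
 *     mem_index      -> 24($sp)
 */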