target/arm: Split gen_add_CC and gen_sub_CC

Split out specific 32-bit and 64-bit functions.
These carry the same signature as tcg_gen_add_i64,
and so will be easier to pass as callbacks.

Retain gen_add_CC and gen_sub_CC during conversion.
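
For illustration only (none of this is in the patch itself): because the split
helpers share the tcg_gen_add_i64()-style signature, a later caller can pick a
generator once and invoke it through a single function-pointer type. The
typedef name ArithTwoOp and the gen_addsub_CC() dispatcher below are
assumptions for this sketch, written against the surrounding translate-a64.c
context, not code from this series.

    /* Hypothetical callback type: matches tcg_gen_add_i64() and the new helpers. */
    typedef void ArithTwoOp(TCGv_i64, TCGv_i64, TCGv_i64);

    /*
     * Hypothetical dispatcher: select a flag-setting generator once, then
     * call it through the common signature.  The 32-bit variants already
     * zero-extend the 64-bit destination themselves.
     */
    static void gen_addsub_CC(TCGv_i64 dest, TCGv_i64 t0, TCGv_i64 t1,
                              bool sub, bool sf)
    {
        ArithTwoOp *fn;

        if (sub) {
            fn = sf ? gen_sub64_CC : gen_sub32_CC;
        } else {
            fn = sf ? gen_add64_CC : gen_add32_CC;
        }
        fn(dest, t0, t1);
    }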

Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Message-id: 20230512144106.3608981-6-peter.maydell@linaro.org
[PMM: rebased]
Reviewed-by: Peter Maydell <peter.maydell@linaro.org>
Signed-off-by: Peter Maydell <peter.maydell@linaro.org>
Richard Henderson 2023-05-12 15:40:51 +01:00 committed by Peter Maydell
parent 45fda88ea2
commit 372b7ec3a8

@@ -682,9 +682,8 @@ static inline void gen_logic_CC(int sf, TCGv_i64 result)
 }
 
 /* dest = T0 + T1; compute C, N, V and Z flags */
-static void gen_add_CC(int sf, TCGv_i64 dest, TCGv_i64 t0, TCGv_i64 t1)
+static void gen_add64_CC(TCGv_i64 dest, TCGv_i64 t0, TCGv_i64 t1)
 {
-    if (sf) {
         TCGv_i64 result, flag, tmp;
         result = tcg_temp_new_i64();
         flag = tcg_temp_new_i64();
@@ -703,8 +702,10 @@ static void gen_add_CC(int sf, TCGv_i64 dest, TCGv_i64 t0, TCGv_i64 t1)
         tcg_gen_extrh_i64_i32(cpu_VF, flag);
 
         tcg_gen_mov_i64(dest, result);
-    } else {
-        /* 32 bit arithmetic */
+}
+
+static void gen_add32_CC(TCGv_i64 dest, TCGv_i64 t0, TCGv_i64 t1)
+{
         TCGv_i32 t0_32 = tcg_temp_new_i32();
         TCGv_i32 t1_32 = tcg_temp_new_i32();
         TCGv_i32 tmp = tcg_temp_new_i32();
@@ -719,12 +720,19 @@ static void gen_add_CC(int sf, TCGv_i64 dest, TCGv_i64 t0, TCGv_i64 t1)
         tcg_gen_andc_i32(cpu_VF, cpu_VF, tmp);
         tcg_gen_extu_i32_i64(dest, cpu_NF);
     }
+
+static void gen_add_CC(int sf, TCGv_i64 dest, TCGv_i64 t0, TCGv_i64 t1)
+{
+    if (sf) {
+        gen_add64_CC(dest, t0, t1);
+    } else {
+        gen_add32_CC(dest, t0, t1);
+    }
 }
 
 /* dest = T0 - T1; compute C, N, V and Z flags */
-static void gen_sub_CC(int sf, TCGv_i64 dest, TCGv_i64 t0, TCGv_i64 t1)
+static void gen_sub64_CC(TCGv_i64 dest, TCGv_i64 t0, TCGv_i64 t1)
 {
-    if (sf) {
         /* 64 bit arithmetic */
         TCGv_i64 result, flag, tmp;
 
@@ -743,7 +751,10 @@ static void gen_sub_CC(int sf, TCGv_i64 dest, TCGv_i64 t0, TCGv_i64 t1)
         tcg_gen_and_i64(flag, flag, tmp);
         tcg_gen_extrh_i64_i32(cpu_VF, flag);
         tcg_gen_mov_i64(dest, result);
-    } else {
+}
+
+static void gen_sub32_CC(TCGv_i64 dest, TCGv_i64 t0, TCGv_i64 t1)
+{
         /* 32 bit arithmetic */
         TCGv_i32 t0_32 = tcg_temp_new_i32();
         TCGv_i32 t1_32 = tcg_temp_new_i32();
@@ -760,6 +771,14 @@ static void gen_sub_CC(int sf, TCGv_i64 dest, TCGv_i64 t0, TCGv_i64 t1)
         tcg_gen_and_i32(cpu_VF, cpu_VF, tmp);
         tcg_gen_extu_i32_i64(dest, cpu_NF);
     }
+
+static void gen_sub_CC(int sf, TCGv_i64 dest, TCGv_i64 t0, TCGv_i64 t1)
+{
+    if (sf) {
+        gen_sub64_CC(dest, t0, t1);
+    } else {
+        gen_sub32_CC(dest, t0, t1);
+    }
 }
 
 /* dest = T0 + T1 + CF; do not compute flags. */