target-alpha: Move memory helpers to mem_helper.c.

This completes the transition away from AREG0.  This patch must
come last in the series because it also requires CONFIG_TCG_PASS_AREG0
to be set.

Signed-off-by: Richard Henderson <rth@twiddle.net>
Signed-off-by: Blue Swirl <blauwirbel@gmail.com>
Author: Richard Henderson <rth@twiddle.net>
Date: 2012-03-24 09:51:13 -07:00
Committed by: Blue Swirl
Parent: 69163fbb0c
Commit: c30827555d
5 changed files with 30 additions and 39 deletions
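
For readers following the AREG0 cleanup: before CONFIG_TCG_PASS_AREG0, helpers reached the CPU state through a global `env` bound to the reserved host register AREG0 (set up by dyngen-exec.h); with the flag set, the translator passes cpu_env explicitly and each helper receives the CPU state as its first argument. A minimal sketch of the difference, using one of the helpers touched below (illustrative only; the _old/_new suffixes are not real function names):

#include "cpu.h"        /* CPUAlphaState, ldl_phys() */

/* Old style: dyngen-exec.h binds a global `env` to host register AREG0,
 * so the helper takes no state pointer at all.                          */
uint64_t helper_ldl_l_phys_old(uint64_t p)
{
    env->lock_addr = p;                  /* `env` is the AREG0 global */
    return env->lock_value = (int32_t)ldl_phys(p);
}

/* New style (CONFIG_TCG_PASS_AREG0): generated code passes cpu_env and
 * the helper takes the CPU state as an explicit first argument.         */
uint64_t helper_ldl_l_phys_new(CPUAlphaState *env, uint64_t p)
{
    env->lock_addr = p;
    return env->lock_value = (int32_t)ldl_phys(p);
}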

Makefile.target

@@ -81,8 +81,10 @@ libobj-y += tcg/tcg.o tcg/optimize.o
 libobj-$(CONFIG_TCG_INTERPRETER) += tci.o
 libobj-y += fpu/softfloat.o
 ifneq ($(TARGET_BASE_ARCH), sparc)
+ifneq ($(TARGET_BASE_ARCH), alpha)
 libobj-y += op_helper.o
 endif
+endif
 libobj-y += helper.o
 ifeq ($(TARGET_BASE_ARCH), i386)
 libobj-y += cpuid.o
@@ -96,7 +98,7 @@ libobj-y += cpu_init.o
 endif
 libobj-$(TARGET_SPARC) += int32_helper.o
 libobj-$(TARGET_SPARC64) += int64_helper.o
-libobj-$(TARGET_ALPHA) += int_helper.o fpu_helper.o sys_helper.o
+libobj-$(TARGET_ALPHA) += int_helper.o fpu_helper.o sys_helper.o mem_helper.o
 libobj-y += disas.o
 libobj-$(CONFIG_TCI_DIS) += tci-dis.o

configure

@@ -3608,7 +3608,7 @@ case "$target_arch2" in
 esac
 
 case "$target_arch2" in
-  sparc*)
+  alpha | sparc*)
    echo "CONFIG_TCG_PASS_AREG0=y" >> $config_target_mak
  ;;
 esac

target-alpha/helper.h

@@ -104,12 +104,12 @@ DEF_HELPER_2(hw_ret, void, env, i64)
 DEF_HELPER_1(ldl_phys, i64, i64)
 DEF_HELPER_1(ldq_phys, i64, i64)
-DEF_HELPER_1(ldl_l_phys, i64, i64)
-DEF_HELPER_1(ldq_l_phys, i64, i64)
+DEF_HELPER_2(ldl_l_phys, i64, env, i64)
+DEF_HELPER_2(ldq_l_phys, i64, env, i64)
 DEF_HELPER_2(stl_phys, void, i64, i64)
 DEF_HELPER_2(stq_phys, void, i64, i64)
-DEF_HELPER_2(stl_c_phys, i64, i64, i64)
-DEF_HELPER_2(stq_c_phys, i64, i64, i64)
+DEF_HELPER_3(stl_c_phys, i64, env, i64, i64)
+DEF_HELPER_3(stq_c_phys, i64, env, i64, i64)
 DEF_HELPER_FLAGS_1(tbia, TCG_CALL_CONST, void, env)
 DEF_HELPER_FLAGS_2(tbis, TCG_CALL_CONST, void, env, i64)
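
The numeral in DEF_HELPER_n is the helper's argument count, so adding the env parameter bumps each macro by one and inserts the CPU state as the first argument of the generated prototype. Roughly, the four changed lines now declare prototypes of this shape (a sketch; the real declarations are produced by the def-helper.h macro machinery):

uint64_t helper_ldl_l_phys(CPUAlphaState *env, uint64_t p);
uint64_t helper_ldq_l_phys(CPUAlphaState *env, uint64_t p);
uint64_t helper_stl_c_phys(CPUAlphaState *env, uint64_t p, uint64_t v);
uint64_t helper_stq_c_phys(CPUAlphaState *env, uint64_t p, uint64_t v);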

target-alpha/op_helper.c → target-alpha/mem_helper.c

@@ -1,5 +1,5 @@
 /*
- * Alpha emulation cpu micro-operations helpers for qemu.
+ * Helpers for loads and stores
  *
  * Copyright (c) 2007 Jocelyn Mayer
  *
@@ -18,17 +18,12 @@
  */
 
 #include "cpu.h"
-#include "dyngen-exec.h"
-#include "host-utils.h"
-#include "softfloat.h"
 #include "helper.h"
-#include "sysemu.h"
-#include "qemu-timer.h"
 
-/*****************************************************************************/
 /* Softmmu support */
-#if !defined (CONFIG_USER_ONLY)
+#ifndef CONFIG_USER_ONLY
 
 uint64_t helper_ldl_phys(uint64_t p)
 {
     return (int32_t)ldl_phys(p);
@@ -39,16 +34,16 @@ uint64_t helper_ldq_phys(uint64_t p)
     return ldq_phys(p);
 }
 
-uint64_t helper_ldl_l_phys(uint64_t p)
+uint64_t helper_ldl_l_phys(CPUAlphaState *env, uint64_t p)
 {
     env->lock_addr = p;
     return env->lock_value = (int32_t)ldl_phys(p);
 }
 
-uint64_t helper_ldq_l_phys(uint64_t p)
+uint64_t helper_ldq_l_phys(CPUAlphaState *env, uint64_t p)
 {
     env->lock_addr = p;
-    return env->lock_value = ldl_phys(p);
+    return env->lock_value = ldq_phys(p);
 }
 
 void helper_stl_phys(uint64_t p, uint64_t v)
@@ -61,7 +56,7 @@ void helper_stq_phys(uint64_t p, uint64_t v)
     stq_phys(p, v);
 }
 
-uint64_t helper_stl_c_phys(uint64_t p, uint64_t v)
+uint64_t helper_stl_c_phys(CPUAlphaState *env, uint64_t p, uint64_t v)
 {
     uint64_t ret = 0;
 
@@ -77,7 +72,7 @@ uint64_t helper_stl_c_phys(uint64_t p, uint64_t v)
     return ret;
 }
 
-uint64_t helper_stq_c_phys(uint64_t p, uint64_t v)
+uint64_t helper_stq_c_phys(CPUAlphaState *env, uint64_t p, uint64_t v)
 {
     uint64_t ret = 0;
 
@@ -93,8 +88,8 @@ uint64_t helper_stq_c_phys(uint64_t p, uint64_t v)
     return ret;
 }
 
-static void QEMU_NORETURN do_unaligned_access(target_ulong addr, int is_write,
-                                              int is_user, void *retaddr)
+static void do_unaligned_access(CPUAlphaState *env, target_ulong addr,
+                                int is_write, int is_user, void *retaddr)
 {
     uint64_t pc;
     uint32_t insn;
@@ -102,7 +97,7 @@ static void QEMU_NORETURN do_unaligned_access(target_ulong addr, int is_write,
     do_restore_state(env, retaddr);
 
     pc = env->pc;
-    insn = ldl_code(pc);
+    insn = cpu_ldl_code(env, pc);
 
     env->trap_arg0 = addr;
     env->trap_arg1 = insn >> 26;                /* opcode */
@@ -112,14 +107,12 @@ static void QEMU_NORETURN do_unaligned_access(target_ulong addr, int is_write,
     cpu_loop_exit(env);
 }
 
-void QEMU_NORETURN cpu_unassigned_access(CPUAlphaState *env1,
-                                         target_phys_addr_t addr, int is_write,
-                                         int is_exec, int unused, int size)
+void cpu_unassigned_access(CPUAlphaState *env, target_phys_addr_t addr,
+                           int is_write, int is_exec, int unused, int size)
 {
-    env = env1;
     env->trap_arg0 = addr;
     env->trap_arg1 = is_write;
-    dynamic_excp(env1, GETPC(), EXCP_MCHK, 0);
+    dynamic_excp(env, NULL, EXCP_MCHK, 0);
 }
 
 #include "softmmu_exec.h"
@@ -143,20 +136,16 @@ void QEMU_NORETURN cpu_unassigned_access(CPUAlphaState *env1,
    NULL, it means that the function was called in C code (i.e. not
    from generated code or from helper.c) */
 /* XXX: fix it to restore all registers */
-void tlb_fill(CPUAlphaState *env1, target_ulong addr, int is_write, int mmu_idx,
-              void *retaddr)
+void tlb_fill(CPUAlphaState *env, target_ulong addr, int is_write,
+              int mmu_idx, void *retaddr)
 {
-    CPUAlphaState *saved_env;
     int ret;
 
-    saved_env = env;
-    env = env1;
     ret = cpu_alpha_handle_mmu_fault(env, addr, is_write, mmu_idx);
     if (unlikely(ret != 0)) {
         do_restore_state(env, retaddr);
         /* Exception index and error code are already set */
         cpu_loop_exit(env);
     }
-    env = saved_env;
 }
-#endif
+#endif /* CONFIG_USER_ONLY */
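
The hunks above elide the middle of the store-conditional helpers. They implement Alpha's load-locked/store-conditional pairing against the lock_addr/lock_value fields set by helper_ldl_l_phys and helper_ldq_l_phys; a hedged reconstruction of the longword variant (an approximation, not the verbatim file contents):

uint64_t helper_stl_c_phys(CPUAlphaState *env, uint64_t p, uint64_t v)
{
    uint64_t ret = 0;

    if (p == env->lock_addr) {
        /* The store only succeeds if memory still holds the value that
           the matching load-locked recorded in lock_value.              */
        int32_t old = ldl_phys(p);
        if (old == (int32_t)env->lock_value) {
            stl_phys(p, v);
            ret = 1;
        }
    }
    env->lock_addr = -1;     /* consume the lock either way */
    return ret;
}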

target-alpha/translate.c

@@ -2867,11 +2867,11 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
                 break;
             case 0x2:
                 /* Longword physical access with lock (hw_ldl_l/p) */
-                gen_helper_ldl_l_phys(cpu_ir[ra], addr);
+                gen_helper_ldl_l_phys(cpu_ir[ra], cpu_env, addr);
                 break;
             case 0x3:
                 /* Quadword physical access with lock (hw_ldq_l/p) */
-                gen_helper_ldq_l_phys(cpu_ir[ra], addr);
+                gen_helper_ldq_l_phys(cpu_ir[ra], cpu_env, addr);
                 break;
             case 0x4:
                 /* Longword virtual PTE fetch (hw_ldl/v) */
@@ -3180,11 +3180,11 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
                 break;
             case 0x2:
                 /* Longword physical access with lock */
-                gen_helper_stl_c_phys(val, addr, val);
+                gen_helper_stl_c_phys(val, cpu_env, addr, val);
                 break;
             case 0x3:
                 /* Quadword physical access with lock */
-                gen_helper_stq_c_phys(val, addr, val);
+                gen_helper_stq_c_phys(val, cpu_env, addr, val);
                 break;
             case 0x4:
                 /* Longword virtual access */
@@ -3420,7 +3420,7 @@ static inline void gen_intermediate_code_internal(CPUAlphaState *env,
         }
         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
-        insn = ldl_code(ctx.pc);
+        insn = cpu_ldl_code(env, ctx.pc);
         num_insns++;
 
         if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP))) {