Implemented SVM decode assists. Some parts are still missing - coming soon.
This commit is contained in:
parent
9011005580
commit
8a01ee1661
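
Summary of the change: the CR/DR intercepts, the INVLPG intercept, and the software-interrupt intercept that previously raised a bare Svm_Vmexit(reason) now also pass decode information as EXITINFO1 when the emulated CPU advertises the SVM Decode Assist feature. A condensed, illustrative sketch of the convention (crn is a placeholder for the intercepted control register number; the macros are the ones appearing in the hunks below):

// Illustrative sketch only - it restates the pattern repeated throughout this commit.
// With decode assist, a MOV-to-CRn intercept reports the source GPR number in
// EXITINFO1 with bit 63 set (BX_SVM_CR_WRITE_MASK); a MOV-from-CRn intercept
// reports the destination GPR number with bit 63 clear. Without decode assist
// the exit info stays 0, as before.
if (BX_CPU_THIS_PTR in_svm_guest) {
  if (SVM_CR_WRITE_INTERCEPTED(crn)) {
    if (BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST))
      Svm_Vmexit(SVM_VMEXIT_CR0_WRITE + crn, BX_SVM_CR_WRITE_MASK | i->src());
    else
      Svm_Vmexit(SVM_VMEXIT_CR0_WRITE + crn);
  }
}
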
@@ -4012,11 +4012,12 @@ public: // for now...
  BX_CPP_AttrNoReturn();
  BX_SMF void init_SMRAM(void);
  BX_SMF int int_number(unsigned s);
  BX_SMF bx_bool SetCR0(bx_address val) BX_CPP_AttrRegparmN(1);

  BX_SMF bx_bool SetCR0(bxInstruction_c *i, bx_address val);
  BX_SMF bx_bool check_CR0(bx_address val) BX_CPP_AttrRegparmN(1);
  BX_SMF bx_bool SetCR3(bx_address val) BX_CPP_AttrRegparmN(1);
#if BX_CPU_LEVEL >= 5
  BX_SMF bx_bool SetCR4(bx_address val) BX_CPP_AttrRegparmN(1);
  BX_SMF bx_bool SetCR4(bxInstruction_c *i, bx_address val);
  BX_SMF bx_bool check_CR4(bx_address val) BX_CPP_AttrRegparmN(1);
  BX_SMF Bit32u get_cr4_allow_mask(void);
#endif
@@ -4030,6 +4031,15 @@ public: // for now...
  BX_SMF bx_bool SetEFER(bx_address val) BX_CPP_AttrRegparmN(1);
#endif

  BX_SMF bx_address read_CR0(void);
#if BX_CPU_LEVEL >= 5
  BX_SMF bx_address read_CR4(void);
#endif
#if BX_CPU_LEVEL >= 6
  BX_SMF Bit32u ReadCR8(bxInstruction_c *i);
  BX_SMF void WriteCR8(bxInstruction_c *i, bx_address val);
#endif

  BX_SMF void reset(unsigned source);
  BX_SMF void shutdown(void);
  BX_SMF void handleCpuModeChange(void);
@@ -4292,14 +4302,6 @@ public: // for now...
  BX_SMF void check_monitor(bx_phy_address addr, unsigned len);
#endif

  BX_SMF bx_address read_CR0(void);
#if BX_CPU_LEVEL >= 5
  BX_SMF bx_address read_CR4(void);
#endif
#if BX_CPU_LEVEL >= 6
  BX_SMF Bit32u ReadCR8(bxInstruction_c *i);
  BX_SMF void WriteCR8(bxInstruction_c *i, bx_address val);
#endif
#if BX_SUPPORT_VMX
  BX_SMF Bit16u VMread16(unsigned encoding) BX_CPP_AttrRegparmN(1);
  BX_SMF Bit32u VMread32(unsigned encoding) BX_CPP_AttrRegparmN(1);

@@ -58,7 +58,8 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_DdRd(bxInstruction_c *i)

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if (SVM_DR_WRITE_INTERCEPTED(i->dst())) Svm_Vmexit(SVM_VMEXIT_DR0_WRITE + i->dst());
    if (SVM_DR_WRITE_INTERCEPTED(i->dst()))
      Svm_Vmexit(SVM_VMEXIT_DR0_WRITE + i->dst(), BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST) ? i->src() : 0);
  }
#endif

@@ -163,7 +164,8 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_RdDd(bxInstruction_c *i)

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if (SVM_DR_READ_INTERCEPTED(i->src())) Svm_Vmexit(SVM_VMEXIT_DR0_READ + i->src());
    if (SVM_DR_READ_INTERCEPTED(i->src()))
      Svm_Vmexit(SVM_VMEXIT_DR0_READ + i->src(), BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST) ? i->dst() : 0);
  }
#endif

@@ -235,7 +237,8 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_DqRq(bxInstruction_c *i)

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if (SVM_DR_WRITE_INTERCEPTED(i->dst())) Svm_Vmexit(SVM_VMEXIT_DR0_WRITE + i->dst());
    if (SVM_DR_WRITE_INTERCEPTED(i->dst()))
      Svm_Vmexit(SVM_VMEXIT_DR0_WRITE + i->dst(), BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST) ? i->src() : 0);
  }
#endif

@@ -341,7 +344,8 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_RqDq(bxInstruction_c *i)

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if (SVM_DR_READ_INTERCEPTED(i->src())) Svm_Vmexit(SVM_VMEXIT_DR0_READ + i->src());
    if (SVM_DR_READ_INTERCEPTED(i->src()))
      Svm_Vmexit(SVM_VMEXIT_DR0_READ + i->src(), BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST) ? i->dst() : 0);
  }
#endif

@@ -378,6 +382,8 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_RqDq(bxInstruction_c *i)
}
#endif // #if BX_SUPPORT_X86_64

#define BX_SVM_CR_WRITE_MASK (BX_CONST64(1) << 63)

BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_CR0Rd(bxInstruction_c *i)
{
  // CPL is always 0 in real mode
@@ -396,7 +402,7 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_CR0Rd(bxInstruction_c *i)
  if (BX_CPU_THIS_PTR in_vmx_guest)
    val_32 = (Bit32u) VMexit_CR0_Write(i, val_32);
#endif
  if (! SetCR0(val_32))
  if (! SetCR0(i, val_32))
    exception(BX_GP_EXCEPTION, 0);

  BX_INSTR_TLB_CNTRL(BX_CPU_ID, BX_INSTR_MOV_CR0, val_32);
@@ -421,7 +427,12 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_CR2Rd(bxInstruction_c *i)

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if(SVM_CR_WRITE_INTERCEPTED(2)) Svm_Vmexit(SVM_VMEXIT_CR2_WRITE);
    if(SVM_CR_WRITE_INTERCEPTED(2)) {
      if (BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST))
        Svm_Vmexit(SVM_VMEXIT_CR2_WRITE, BX_SVM_CR_WRITE_MASK | i->src());
      else
        Svm_Vmexit(SVM_VMEXIT_CR2_WRITE);
    }
  }
#endif

@@ -447,10 +458,21 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_CR3Rd(bxInstruction_c *i)
    VMexit_CR3_Write(i, val_32);
#endif

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if(SVM_CR_WRITE_INTERCEPTED(3)) {
      if (BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST))
        Svm_Vmexit(SVM_VMEXIT_CR3_WRITE, BX_SVM_CR_WRITE_MASK | i->src());
      else
        Svm_Vmexit(SVM_VMEXIT_CR3_WRITE);
    }
  }
#endif

#if BX_CPU_LEVEL >= 6
  if (BX_CPU_THIS_PTR cr0.get_PG() && BX_CPU_THIS_PTR cr4.get_PAE() && !long_mode()) {
    if (! CheckPDPTR(val_32)) {
      BX_ERROR(("SetCR3(): PDPTR check failed !"));
      BX_ERROR(("MOV_CR3Rd(): PDPTR check failed !"));
      exception(BX_GP_EXCEPTION, 0);
    }
  }
@@ -480,7 +502,7 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_CR4Rd(bxInstruction_c *i)
  if (BX_CPU_THIS_PTR in_vmx_guest)
    val_32 = (Bit32u) VMexit_CR4_Write(i, val_32);
#endif
  if (! SetCR4(val_32))
  if (! SetCR4(i, val_32))
    exception(BX_GP_EXCEPTION, 0);

  BX_INSTR_TLB_CNTRL(BX_CPU_ID, BX_INSTR_MOV_CR4, val_32);
@@ -501,6 +523,13 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_RdCR0(bxInstruction_c *i)

  if (i->src() == 0) {
    // CR0
#if BX_SUPPORT_SVM
    if (BX_CPU_THIS_PTR in_svm_guest) {
      if(SVM_CR_READ_INTERCEPTED(0))
        Svm_Vmexit(SVM_VMEXIT_CR0_READ, BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST) ? i->dst() : 0);
    }
#endif

    val_32 = (Bit32u) read_CR0(); /* correctly handle VMX */
  }
#if BX_CPU_LEVEL >= 6
@@ -525,7 +554,8 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_RdCR2(bxInstruction_c *i)

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if(SVM_CR_READ_INTERCEPTED(2)) Svm_Vmexit(SVM_VMEXIT_CR2_READ);
    if(SVM_CR_READ_INTERCEPTED(2))
      Svm_Vmexit(SVM_VMEXIT_CR2_READ, BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST) ? i->dst() : 0);
  }
#endif

@@ -544,7 +574,8 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_RdCR3(bxInstruction_c *i)

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if(SVM_CR_READ_INTERCEPTED(3)) Svm_Vmexit(SVM_VMEXIT_CR3_READ);
    if(SVM_CR_READ_INTERCEPTED(3))
      Svm_Vmexit(SVM_VMEXIT_CR3_READ, BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST) ? i->dst() : 0);
  }
#endif

@@ -569,6 +600,13 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_RdCR4(bxInstruction_c *i)
    exception(BX_GP_EXCEPTION, 0);
  }

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if(SVM_CR_READ_INTERCEPTED(4))
      Svm_Vmexit(SVM_VMEXIT_CR4_READ, BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST) ? i->dst() : 0);
  }
#endif

  Bit32u val_32 = (Bit32u) read_CR4(); /* correctly handle VMX */

  BX_WRITE_32BIT_REGZ(i->dst(), val_32);
@@ -595,7 +633,7 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_CR0Rq(bxInstruction_c *i)
  if (BX_CPU_THIS_PTR in_vmx_guest)
    val_64 = VMexit_CR0_Write(i, val_64);
#endif
  if (! SetCR0(val_64))
  if (! SetCR0(i, val_64))
    exception(BX_GP_EXCEPTION, 0);

  BX_INSTR_TLB_CNTRL(BX_CPU_ID, BX_INSTR_MOV_CR0, (Bit32u) val_64);
@@ -621,7 +659,12 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_CR2Rq(bxInstruction_c *i)

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if(SVM_CR_WRITE_INTERCEPTED(2)) Svm_Vmexit(SVM_VMEXIT_CR2_WRITE);
    if(SVM_CR_WRITE_INTERCEPTED(2)) {
      if (BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST))
        Svm_Vmexit(SVM_VMEXIT_CR2_WRITE, BX_SVM_CR_WRITE_MASK | i->src());
      else
        Svm_Vmexit(SVM_VMEXIT_CR2_WRITE);
    }
  }
#endif

@@ -651,6 +694,17 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_CR3Rq(bxInstruction_c *i)
    VMexit_CR3_Write(i, val_64);
#endif

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if(SVM_CR_WRITE_INTERCEPTED(3)) {
      if (BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST))
        Svm_Vmexit(SVM_VMEXIT_CR3_WRITE, BX_SVM_CR_WRITE_MASK | i->src());
      else
        Svm_Vmexit(SVM_VMEXIT_CR3_WRITE);
    }
  }
#endif

  // no PDPTR checks in long mode
  if (! SetCR3(val_64))
    exception(BX_GP_EXCEPTION, 0);
@@ -679,7 +733,7 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_CR4Rq(bxInstruction_c *i)
  if (BX_CPU_THIS_PTR in_vmx_guest)
    val_64 = VMexit_CR4_Write(i, val_64);
#endif
  if (! SetCR4(val_64))
  if (! SetCR4(i, val_64))
    exception(BX_GP_EXCEPTION, 0);

  BX_INSTR_TLB_CNTRL(BX_CPU_ID, BX_INSTR_MOV_CR4, (Bit32u) val_64);
@@ -698,6 +752,13 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_RqCR0(bxInstruction_c *i)

  if (i->src() == 0) {
    // CR0
#if BX_SUPPORT_SVM
    if (BX_CPU_THIS_PTR in_svm_guest) {
      if(SVM_CR_READ_INTERCEPTED(0))
        Svm_Vmexit(SVM_VMEXIT_CR0_READ, BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST) ? i->dst() : 0);
    }
#endif

    val_64 = read_CR0(); /* correctly handle VMX */
  }
  else {
@@ -724,7 +785,8 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_RqCR2(bxInstruction_c *i)

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if(SVM_CR_READ_INTERCEPTED(2)) Svm_Vmexit(SVM_VMEXIT_CR2_READ);
    if(SVM_CR_READ_INTERCEPTED(2))
      Svm_Vmexit(SVM_VMEXIT_CR2_READ, BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST) ? i->dst() : 0);
  }
#endif

@@ -747,7 +809,8 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_RqCR3(bxInstruction_c *i)

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if(SVM_CR_READ_INTERCEPTED(3)) Svm_Vmexit(SVM_VMEXIT_CR3_READ);
    if(SVM_CR_READ_INTERCEPTED(3))
      Svm_Vmexit(SVM_VMEXIT_CR3_READ, BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST) ? i->dst() : 0);
  }
#endif

@@ -773,6 +836,13 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MOV_RqCR4(bxInstruction_c *i)
    exception(BX_GP_EXCEPTION, 0);
  }

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if(SVM_CR_READ_INTERCEPTED(4))
      Svm_Vmexit(SVM_VMEXIT_CR4_READ, BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST) ? i->dst() : 0);
  }
#endif

  Bit64u val_64 = read_CR4(); /* correctly handle VMX */

  BX_WRITE_64BIT_REG(i->dst(), val_64);
@@ -821,7 +891,7 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::LMSW_Ew(bxInstruction_c *i)
  msw &= 0xf; // LMSW only affects last 4 flags

  Bit32u cr0 = (BX_CPU_THIS_PTR cr0.get32() & 0xfffffff0) | msw;
  if (! SetCR0(cr0))
  if (! SetCR0(i, cr0))
    exception(BX_GP_EXCEPTION, 0);

  BX_NEXT_TRACE(i);
@@ -852,14 +922,15 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::SMSW_EwM(bxInstruction_c *i)

bx_address BX_CPU_C::read_CR0(void)
{
  bx_address cr0_val = BX_CPU_THIS_PTR cr0.get32();

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    // used for SMSW instruction only
    if(SVM_CR_READ_INTERCEPTED(0)) Svm_Vmexit(SVM_VMEXIT_CR0_READ);
  }
#endif

  bx_address cr0_val = BX_CPU_THIS_PTR cr0.get32();

#if BX_SUPPORT_VMX
  if (BX_CPU_THIS_PTR in_vmx_guest) {
    VMCS_CACHE *vm = &BX_CPU_THIS_PTR vmcs;
@@ -875,12 +946,6 @@ bx_address BX_CPU_C::read_CR4(void)
{
  bx_address cr4_val = BX_CPU_THIS_PTR cr4.get32();

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if(SVM_CR_READ_INTERCEPTED(4)) Svm_Vmexit(SVM_VMEXIT_CR4_READ);
  }
#endif

#if BX_SUPPORT_VMX
  if (BX_CPU_THIS_PTR in_vmx_guest) {
    VMCS_CACHE *vm = &BX_CPU_THIS_PTR vmcs;
@@ -935,7 +1000,7 @@ bx_bool BX_CPP_AttrRegparmN(1) BX_CPU_C::check_CR0(bx_address cr0_val)
  return 1;
}

bx_bool BX_CPP_AttrRegparmN(1) BX_CPU_C::SetCR0(bx_address val)
bx_bool BX_CPU_C::SetCR0(bxInstruction_c *i, bx_address val)
{
  if (! check_CR0(val)) return 0;

@@ -998,7 +1063,13 @@ bx_bool BX_CPP_AttrRegparmN(1) BX_CPU_C::SetCR0(bx_address val)

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if(SVM_CR_WRITE_INTERCEPTED(0)) Svm_Vmexit(SVM_VMEXIT_CR0_WRITE);
    if(SVM_CR_WRITE_INTERCEPTED(0)) {
      // LMSW instruction should VMEXIT before
      if (BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST))
        Svm_Vmexit(SVM_VMEXIT_CR0_WRITE, BX_SVM_CR_WRITE_MASK | i->src());
      else
        Svm_Vmexit(SVM_VMEXIT_CR0_WRITE);
    }

    if (SVM_INTERCEPT(SVM_INTERCEPT0_CR0_WRITE_NO_TS_MP)) {
      if ((oldCR0 & 0xfffffff5) != (val_32 & 0xfffffff5)) {
@@ -1154,9 +1225,8 @@ bx_bool BX_CPP_AttrRegparmN(1) BX_CPU_C::check_CR4(bx_address cr4_val)
      return 0;
    }
  }

  if (temp_cr4.get_PCIDE()) {
    if (! long_mode()) {
  else {
    if (temp_cr4.get_PCIDE()) {
      BX_ERROR(("check_CR4(): attempt to set CR4.PCIDE when EFER.LMA=0"));
      return 0;
    }
@@ -1181,16 +1251,10 @@ bx_bool BX_CPP_AttrRegparmN(1) BX_CPU_C::check_CR4(bx_address cr4_val)
  return 1;
}

bx_bool BX_CPP_AttrRegparmN(1) BX_CPU_C::SetCR4(bx_address val)
bx_bool BX_CPU_C::SetCR4(bxInstruction_c *i, bx_address val)
{
  if (! check_CR4(val)) return 0;

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if(SVM_CR_WRITE_INTERCEPTED(4)) Svm_Vmexit(SVM_VMEXIT_CR4_WRITE);
  }
#endif

#if BX_CPU_LEVEL >= 6
  // Modification of PGE,PAE,PSE,PCIDE,SMEP flushes TLB cache according to docs.
  if ((val & BX_CR4_FLUSH_TLB_MASK) != (BX_CPU_THIS_PTR cr4.val32 & BX_CR4_FLUSH_TLB_MASK)) {
@@ -1216,6 +1280,17 @@ bx_bool BX_CPP_AttrRegparmN(1) BX_CPU_C::SetCR4(bx_address val)
  }
#endif

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if(SVM_CR_WRITE_INTERCEPTED(4)) {
      if (BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST))
        Svm_Vmexit(SVM_VMEXIT_CR4_WRITE, BX_SVM_CR_WRITE_MASK | i->src());
      else
        Svm_Vmexit(SVM_VMEXIT_CR4_WRITE);
    }
  }
#endif

  BX_CPU_THIS_PTR cr4.set32((Bit32u) val);

#if BX_CPU_LEVEL >= 6
@@ -1240,12 +1315,6 @@ bx_bool BX_CPP_AttrRegparmN(1) BX_CPU_C::SetCR3(bx_address val)
  }
#endif

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if(SVM_CR_WRITE_INTERCEPTED(3)) Svm_Vmexit(SVM_VMEXIT_CR3_WRITE);
  }
#endif

  BX_CPU_THIS_PTR cr3 = val;

  // flush TLB even if value does not change
@@ -1292,7 +1361,12 @@ void BX_CPU_C::WriteCR8(bxInstruction_c *i, bx_address val)
{
#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if (SVM_CR_WRITE_INTERCEPTED(8)) Svm_Vmexit(SVM_VMEXIT_CR8_WRITE);
    if(SVM_CR_WRITE_INTERCEPTED(8)) {
      if (BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST))
        Svm_Vmexit(SVM_VMEXIT_CR8_WRITE, BX_SVM_CR_WRITE_MASK | i->src());
      else
        Svm_Vmexit(SVM_VMEXIT_CR8_WRITE);
    }
  }
#endif

@@ -490,16 +490,14 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::INVLPG(bxInstruction_c* i)

#if BX_SUPPORT_VMX
  if (BX_CPU_THIS_PTR in_vmx_guest) {
    if (VMEXIT(VMX_VM_EXEC_CTRL2_INVLPG_VMEXIT)) {
      BX_ERROR(("VMEXIT: INVLPG 0x" FMT_ADDRX, laddr));
      VMexit(VMX_VMEXIT_INVLPG, laddr);
    }
    if (VMEXIT(VMX_VM_EXEC_CTRL2_INVLPG_VMEXIT)) VMexit(VMX_VMEXIT_INVLPG, laddr);
  }
#endif

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if (SVM_INTERCEPT(SVM_INTERCEPT0_INVLPG)) Svm_Vmexit(SVM_VMEXIT_INVLPG);
    if (SVM_INTERCEPT(SVM_INTERCEPT0_INVLPG))
      Svm_Vmexit(SVM_VMEXIT_INVLPG, BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST) ? laddr : 0);
  }
#endif

@@ -662,6 +662,15 @@ bx_bool BX_CPU_C::resume_from_system_management_mode(BX_SMM_State *smm_state)
  }
#endif

#if BX_CPU_LEVEL >= 5
  if (smm_state->efer.get32() & ~((Bit64u) BX_CPU_THIS_PTR efer_suppmask)) {
    BX_PANIC(("SMM restore: Attempt to set EFER reserved bits: 0x%08x !", smm_state->efer.get32()));
    return 0;
  }

  BX_CPU_THIS_PTR efer.set32(smm_state->efer.val32);
#endif

  // check CR0 conditions for entering to shutdown state
  if (!check_CR0(smm_state->cr0.get32())) {
    BX_PANIC(("SMM restore: CR0 consistency check failed !"));
@@ -673,19 +682,13 @@ bx_bool BX_CPU_C::resume_from_system_management_mode(BX_SMM_State *smm_state)
    BX_PANIC(("SMM restore: CR4 consistency check failed !"));
    return 0;
  }
#endif

  // shutdown if write to reserved CR4 bits
  if (!SetCR4(smm_state->cr4.get32())) {
    BX_PANIC(("SMM restore: incorrect CR4 state !"));
    return 0;
  }

  if (smm_state->efer.get32() & ~((Bit64u) BX_CPU_THIS_PTR efer_suppmask)) {
    BX_PANIC(("SMM restore: Attempt to set EFER reserved bits: 0x%08x !", smm_state->efer.get32()));
    return 0;
  }

  BX_CPU_THIS_PTR efer.set32(smm_state->efer.val32);
  BX_CPU_THIS_PTR cr0.set32(smm_state->cr0.get32());
#if BX_CPU_LEVEL >= 5
  BX_CPU_THIS_PTR cr4.set32(smm_state->cr4.get32());
#endif
  BX_CPU_THIS_PTR cr3 = smm_state->cr3;

#if BX_SUPPORT_X86_64
  if (BX_CPU_THIS_PTR efer.get_LMA()) {
@@ -694,7 +697,7 @@ bx_bool BX_CPU_C::resume_from_system_management_mode(BX_SMM_State *smm_state)
    return 0;
  }

  if (!BX_CPU_THIS_PTR cr4.get_PAE() || !smm_state->cr0.get_PG() || !smm_state->cr0.get_PE() || !BX_CPU_THIS_PTR efer.get_LME()) {
  if (!BX_CPU_THIS_PTR cr4.get_PAE() || !BX_CPU_THIS_PTR cr0.get_PG() || !BX_CPU_THIS_PTR cr0.get_PE() || !BX_CPU_THIS_PTR efer.get_LME()) {
    BX_PANIC(("SMM restore: If EFER.LMA = 1 <=> CR4.PAE, CR0.PG, CR0.PE, EFER.LME=1 !"));
    return 0;
  }
@@ -706,7 +709,7 @@ bx_bool BX_CPU_C::resume_from_system_management_mode(BX_SMM_State *smm_state)
    }
  }

  if (BX_CPU_THIS_PTR cr4.get_PAE() && smm_state->cr0.get_PG() && smm_state->cr0.get_PE() && BX_CPU_THIS_PTR efer.get_LME()) {
  if (BX_CPU_THIS_PTR cr4.get_PAE() && BX_CPU_THIS_PTR cr0.get_PG() && BX_CPU_THIS_PTR cr0.get_PE() && BX_CPU_THIS_PTR efer.get_LME()) {
    if (! BX_CPU_THIS_PTR efer.get_LMA()) {
      BX_PANIC(("SMM restore: If EFER.LMA = 1 <=> CR4.PAE, CR0.PG, CR0.PE, EFER.LME=1 !"));
      return 0;
@@ -714,22 +717,6 @@ bx_bool BX_CPU_C::resume_from_system_management_mode(BX_SMM_State *smm_state)
  }
#endif

#endif

  // hack CR0 to be able to back to long mode correctly
  BX_CPU_THIS_PTR cr0.set_PE(0); // real mode (bit 0)
  BX_CPU_THIS_PTR cr0.set_PG(0); // paging disabled (bit 31)
  if (! SetCR0(smm_state->cr0.get32())) {
    BX_PANIC(("SMM restore: failed to restore CR0 !"));
    return 0;
  }
  setEFlags(smm_state->eflags);

  if (!SetCR3(smm_state->cr3)) {
    BX_PANIC(("SMM restore: failed to restore CR3 !"));
    return 0;
  }

#if BX_CPU_LEVEL >= 6
  if (BX_CPU_THIS_PTR cr0.get_PG() && BX_CPU_THIS_PTR cr4.get_PAE() && !long_mode()) {
    if (! CheckPDPTR(smm_state->cr3)) {
@@ -739,6 +726,8 @@ bx_bool BX_CPU_C::resume_from_system_management_mode(BX_SMM_State *smm_state)
  }
#endif

  setEFlags(smm_state->eflags);

#if BX_SUPPORT_X86_64
  for (int n=0; n<BX_GENERAL_REGISTERS; n++)
    BX_WRITE_64BIT_REG(n, smm_state->gen_reg[n]);

@@ -125,7 +125,8 @@ BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::INT_Ib(bxInstruction_c *i)

#if BX_SUPPORT_SVM
  if (BX_CPU_THIS_PTR in_svm_guest) {
    if (SVM_INTERCEPT(SVM_INTERCEPT0_SOFTINT)) Svm_Vmexit(SVM_VMEXIT_SOFTWARE_INTERRUPT);
    if (SVM_INTERCEPT(SVM_INTERCEPT0_SOFTINT))
      Svm_Vmexit(SVM_VMEXIT_SOFTWARE_INTERRUPT, BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST) ? vector : 0);
  }
#endif

@@ -588,6 +588,28 @@ void BX_CPU_C::Svm_Vmexit(int reason, Bit64u exitinfo1, Bit64u exitinfo2)
  if (BX_CPU_THIS_PTR speculative_rsp)
    RSP = BX_CPU_THIS_PTR prev_rsp;

  if (BX_SUPPORT_SVM_EXTENSION(BX_CPUID_SVM_DECODE_ASSIST)) {
    //
    // In the case of a Nested #PF or intercepted #PF, guest instruction bytes at
    // guest CS:RIP are stored into the 16-byte wide field Guest Instruction Bytes.
    // Up to 15 bytes are recorded, read from guest CS:RIP. The number of bytes
    // fetched is put into the first byte of this field. Zero indicates that no
    // bytes were fetched.
    //
    // This field is filled in only during data page faults. Instruction-fetch
    // page faults provide no additional information. All other intercepts clear
    // bits 0:7 in this field to zero.
    //

    if ((reason == SVM_VMEXIT_PF_EXCEPTION || reason == SVM_VMEXIT_NPF) && !(exitinfo1 & 0x10))
    {
      // TODO
    }
    else {
      vmcb_write8(SVM_CONTROL64_GUEST_INSTR_BYTES, 0);
    }
  }

  mask_event(BX_EVENT_SVM_VIRQ_PENDING);

  BX_CPU_THIS_PTR in_svm_guest = 0;

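The "// TODO" in the data page-fault branch above is presumably the part the commit message calls still missing: recording up to 15 guest instruction bytes, plus a count byte, into the Guest Instruction Bytes field. A rough sketch of one way that could look, assuming a hypothetical fetch_guest_instruction_bytes() helper for copying bytes from guest CS:RIP and a vmcb_write64() counterpart to the vmcb_write8() call used above (neither helper is shown in this diff):

// Hypothetical sketch - not part of this commit.
Bit8u ibytes[16];
memset(ibytes, 0, sizeof(ibytes));
// copy up to 15 bytes starting at guest CS:RIP; returns how many were fetched
unsigned fetched = fetch_guest_instruction_bytes(&ibytes[1], 15);
ibytes[0] = fetched;  // byte 0 holds the byte count, 0 = nothing fetched

Bit64u lo, hi;
memcpy(&lo, &ibytes[0], 8);
memcpy(&hi, &ibytes[8], 8);
vmcb_write64(SVM_CONTROL64_GUEST_INSTR_BYTES,    lo);
vmcb_write64(SVM_CONTROL64_GUEST_INSTR_BYTES_HI, hi);
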
@@ -42,6 +42,7 @@ enum SVM_intercept_codes {
  SVM_VMEXIT_DR0_READ = 32,
  SVM_VMEXIT_DR0_WRITE = 48,
  SVM_VMEXIT_EXCEPTION = 64,
  SVM_VMEXIT_PF_EXCEPTION = (64+BX_PF_EXCEPTION),
  SVM_VMEXIT_INTR = 96,
  SVM_VMEXIT_NMI = 97,
  SVM_VMEXIT_SMI = 98,
@@ -132,6 +133,9 @@ enum SVM_intercept_codes {
#define SVM_CONTROL32_VMCB_CLEAN_BITS (0x0c0)
#define SVM_CONTROL64_NRIP (0x0c8)

#define SVM_CONTROL64_GUEST_INSTR_BYTES (0x0d0)
#define SVM_CONTROL64_GUEST_INSTR_BYTES_HI (0x0d8)

// ======================
// VMCB save state area
// ======================

@@ -2255,7 +2255,7 @@ void BX_CPU_C::VMexit(Bit32u reason, Bit64u qualification)
  if (reason >= VMX_VMEXIT_LAST_REASON)
    BX_PANIC(("PANIC: broken VMEXIT reason %d", reason));
  else
    BX_DEBUG(("VMEXIT reason = %d (%s)", reason, VMX_vmexit_reason_name[reason]));
    BX_DEBUG(("VMEXIT reason = %d (%s) qualification=0x" FMT_LL "x", reason, VMX_vmexit_reason_name[reason], qualification));

  if (reason != VMX_VMEXIT_EXCEPTION_NMI && reason != VMX_VMEXIT_EXTERNAL_INTERRUPT) {
    VMwrite32(VMCS_32BIT_VMEXIT_INTERRUPTION_INFO, 0);

@@ -209,13 +209,7 @@ void bx_load_linux_hack(void)

  // Enter protected mode
  // Fixed by george (kyriazis at nvidia.com)
  // BX_CPU(0)->cr0.pe = 1;
  // BX_CPU(0)->cr0.val32 |= 0x01;

  if (! BX_CPU(0)->SetCR0(BX_CPU(0)->cr0.val32 | 0x01)) {
    BX_INFO(("bx_load_linux_hack: can't enable protected mode in CR0"));
    BX_EXIT(1);
  }
  BX_CPU(0)->cr0.set_PE(1); // protected mode

  // load esi with real_mode
  BX_CPU(0)->gen_reg[BX_32BIT_REG_ESI].dword.erx = 0x90000;