path: root/src/VBox/VMM/VMMSwitcher/AMD64andLegacy.mac
author     Lorry Tar Creator <lorry-tar-importer@baserock.org>    2014-03-26 19:21:20 +0000
committer  <>                                                     2014-05-08 15:03:54 +0000
commit     fb123f93f9f5ce42c8e5785d2f8e0edaf951740e (patch)
tree       c2103d76aec5f1f10892cd1d3a38e24f665ae5db /src/VBox/VMM/VMMSwitcher/AMD64andLegacy.mac
parent     58ed4748338f9466599adfc8a9171280ed99e23f (diff)
Imported from /home/lorry/working-area/delta_VirtualBox/VirtualBox-4.3.10.tar.bz2. (HEAD, VirtualBox-4.3.10, master)
Diffstat (limited to 'src/VBox/VMM/VMMSwitcher/AMD64andLegacy.mac')
-rw-r--r--    src/VBox/VMM/VMMSwitcher/AMD64andLegacy.mac    189
1 file changed, 150 insertions, 39 deletions
diff --git a/src/VBox/VMM/VMMSwitcher/AMD64andLegacy.mac b/src/VBox/VMM/VMMSwitcher/AMD64andLegacy.mac
index 20f9b7ee..5da86a9f 100644
--- a/src/VBox/VMM/VMMSwitcher/AMD64andLegacy.mac
+++ b/src/VBox/VMM/VMMSwitcher/AMD64andLegacy.mac
@@ -4,7 +4,7 @@
;
;
-; Copyright (C) 2006-2012 Oracle Corporation
+; Copyright (C) 2006-2013 Oracle Corporation
;
; This file is part of VirtualBox Open Source Edition (OSE), as
; available from http://www.virtualbox.org. This file is free software;
@@ -101,8 +101,15 @@ BEGINPROC vmmR0ToRawMode
%ifdef VBOX_WITH_VMMR0_DISABLE_LAPIC_NMI
; Unblock Local APIC NMI vectors
; Do this here to ensure the host CS is already restored
- mov ecx, [rdx + CPUM.fApicDisVectors]
- mov r8, [rdx + CPUM.pvApicBase]
+ mov r8d, [rdx + CPUM.offCPUMCPU0]
+ mov ecx, [rdx + r8 + CPUMCPU.fApicDisVectors]
+ test ecx, ecx
+ jz gth64_apic_done
+ cmp byte [rdx + r8 + CPUMCPU.fX2Apic], 1
+ je gth64_x2apic
+
+ ; Legacy APIC mode:
+ mov r8, [rdx + r8 + CPUMCPU.pvApicBase]
shr ecx, 1
jnc gth64_nolint0
and dword [r8 + APIC_REG_LVT_LINT0], ~APIC_REG_LVT_MASKED
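
The shr/jnc pairs here walk fApicDisVectors one bit at a time: bit 0 stands for LVT LINT0, bit 1 for LINT1, bit 2 for the performance-counter LVT and bit 3 for the thermal LVT, matching the bits the entry path sets further down. A minimal C sketch of the same walk for the legacy MMIO-mapped APIC:

    /* Hedged sketch of that unmask walk; the helper name, the offset table
     * and LVT_MASKED are assumptions rather than VirtualBox's actual API,
     * though the bit-to-LVT assignment matches the code in this patch. */
    #include <stdint.h>

    #define LVT_MASKED  UINT32_C(0x10000)                   /* APIC_REG_LVT_MASKED, bit 16 */

    /* bit 0 = LINT0, bit 1 = LINT1, bit 2 = perf counter, bit 3 = thermal */
    static const uint32_t g_aoffLvt[] = { 0x350, 0x360, 0x340, 0x330 };

    static void unmaskDisabledVectors(volatile uint8_t *pbApicBase, uint32_t fApicDisVectors)
    {
        for (unsigned i = 0; i < 4 && fApicDisVectors; i++, fApicDisVectors >>= 1)
            if (fApicDisVectors & 1)                        /* what each 'shr ecx,1 / jnc' pair tests */
                *(volatile uint32_t *)(pbApicBase + g_aoffLvt[i]) &= ~LVT_MASKED;
    }
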
@@ -119,6 +126,43 @@ gth64_nopc:
jnc gth64_notherm
and dword [r8 + APIC_REG_LVT_THMR], ~APIC_REG_LVT_MASKED
gth64_notherm:
+ jmp gth64_apic_done
+
+ ; X2 APIC mode:
+gth64_x2apic:
+ mov r8, rax ; save rax
+ mov r10, rcx
+ shr r10d, 1
+ jnc gth64_x2_nolint0
+ mov ecx, MSR_IA32_X2APIC_START + (APIC_REG_LVT_LINT0 >> 4)
+ rdmsr
+ and eax, ~APIC_REG_LVT_MASKED
+ wrmsr
+gth64_x2_nolint0:
+ shr r10d, 1
+ jnc gth64_x2_nolint1
+ mov ecx, MSR_IA32_X2APIC_START + (APIC_REG_LVT_LINT1 >> 4)
+ rdmsr
+ and eax, ~APIC_REG_LVT_MASKED
+ wrmsr
+gth64_x2_nolint1:
+ shr r10d, 1
+ jnc gth64_x2_nopc
+ mov ecx, MSR_IA32_X2APIC_START + (APIC_REG_LVT_PC >> 4)
+ rdmsr
+ and eax, ~APIC_REG_LVT_MASKED
+ wrmsr
+gth64_x2_nopc:
+ shr r10d, 1
+ jnc gth64_x2_notherm
+ mov ecx, MSR_IA32_X2APIC_START + (APIC_REG_LVT_THMR >> 4)
+ rdmsr
+ and eax, ~APIC_REG_LVT_MASKED
+ wrmsr
+gth64_x2_notherm:
+ mov rax, r8 ; restore rax
+
+gth64_apic_done:
%endif
%ifdef VBOX_WITH_STATISTICS
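
The x2APIC branch reaches the same LVT registers through MSRs instead of MMIO: the MSR index is MSR_IA32_X2APIC_START (0x800) plus the xAPIC register offset shifted right by four, which is exactly what the (APIC_REG_LVT_* >> 4) terms compute. A small sketch of that mapping:

    /* x2APIC MSR index from an xAPIC MMIO register offset; 0x800 and the
     * >>4 rule are per the Intel SDM, the helper name is made up. */
    #include <stdint.h>

    #define MSR_IA32_X2APIC_START  UINT32_C(0x800)

    static inline uint32_t x2apicMsrFromMmioOffset(uint32_t offReg)
    {
        return MSR_IA32_X2APIC_START + (offReg >> 4);   /* one MSR per 16-byte register slot */
    }

    /* e.g. LVT LINT0 0x350 -> MSR 0x835, LINT1 0x360 -> 0x836,
     *      perf ctr  0x340 -> 0x834,     thermal 0x330 -> 0x833 */
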
@@ -280,10 +324,12 @@ BEGINPROC vmmR0ToRawModeAsm
%ifdef VBOX_WITH_VMMR0_DISABLE_LAPIC_NMI
; Block Local APIC NMI vectors
- mov rbx, [rdx + CPUM.pvApicBase]
+ cmp byte [rdx + r8 + CPUMCPU.fX2Apic], 1
+ je htg_x2apic
+ mov rbx, [rdx + r8 + CPUMCPU.pvApicBase]
or rbx, rbx
- jz htg_noapic
- xor edi, edi
+ jz htg_apic_done
+ xor edi, edi ; fApicDisVectors
mov eax, [rbx + APIC_REG_LVT_LINT0]
mov ecx, eax
and ecx, (APIC_REG_LVT_MASKED | APIC_REG_LVT_MODE_MASK)
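
On the way into raw mode, each LVT entry is only touched when it is currently programmed for NMI delivery and not yet masked; the mask bit is then set, the register is read back to force write completion, and a bit is recorded in fApicDisVectors so that only those entries get unmasked on the way out. Roughly, per entry:

    /* One LVT entry of the legacy blocking pass; constants follow the APIC
     * spec, the function and parameter names are hypothetical. */
    #include <stdint.h>

    #define LVT_MASKED     UINT32_C(0x10000)    /* bit 16 */
    #define LVT_MODE_MASK  UINT32_C(0x00700)    /* delivery mode, bits 8-10 */
    #define LVT_MODE_NMI   UINT32_C(0x00400)    /* delivery mode 100b = NMI */

    static uint32_t maskLvtIfNmi(volatile uint32_t *puLvt, uint32_t fThisVectorBit, uint32_t fApicDisVectors)
    {
        uint32_t uLvt = *puLvt;
        if ((uLvt & (LVT_MASKED | LVT_MODE_MASK)) == LVT_MODE_NMI)  /* NMI delivery and not yet masked */
        {
            *puLvt = uLvt | LVT_MASKED;
            (void)*puLvt;                       /* read back, like the asm's write-completion load */
            fApicDisVectors |= fThisVectorBit;  /* remember to unmask it on the way back */
        }
        return fApicDisVectors;
    }
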
@@ -328,8 +374,63 @@ htg_nopc:
mov [rbx + APIC_REG_LVT_THMR], eax
mov eax, [rbx + APIC_REG_LVT_THMR] ; write completion
htg_notherm:
- mov [rdx + CPUM.fApicDisVectors], edi
-htg_noapic:
+ mov [rdx + r8 + CPUMCPU.fApicDisVectors], edi
+ jmp htg_apic_done
+
+ ; X2APIC?
+htg_x2apic:
+ mov r15, rdx ; save rdx
+ xor edi, edi ; fApicDisVectors
+
+ mov ecx, MSR_IA32_X2APIC_START + (APIC_REG_LVT_LINT0 >> 4)
+ rdmsr
+ mov ebx, eax
+ and ebx, (APIC_REG_LVT_MASKED | APIC_REG_LVT_MODE_MASK)
+ cmp ebx, APIC_REG_LVT_MODE_NMI
+ jne htg_x2_nolint0
+ or edi, 0x01
+ or eax, APIC_REG_LVT_MASKED
+ wrmsr
+htg_x2_nolint0:
+ mov ecx, MSR_IA32_X2APIC_START + (APIC_REG_LVT_LINT1 >> 4)
+ rdmsr
+ mov ebx, eax
+ and ebx, (APIC_REG_LVT_MASKED | APIC_REG_LVT_MODE_MASK)
+ cmp ebx, APIC_REG_LVT_MODE_NMI
+ jne htg_x2_nolint1
+ or edi, 0x02
+ or eax, APIC_REG_LVT_MASKED
+ wrmsr
+htg_x2_nolint1:
+ mov ecx, MSR_IA32_X2APIC_START + (APIC_REG_LVT_PC >> 4)
+ rdmsr
+ mov ebx, eax
+ and ebx, (APIC_REG_LVT_MASKED | APIC_REG_LVT_MODE_MASK)
+ cmp ebx, APIC_REG_LVT_MODE_NMI
+ jne htg_x2_nopc
+ or edi, 0x04
+ or eax, APIC_REG_LVT_MASKED
+ wrmsr
+htg_x2_nopc:
+ mov ecx, MSR_IA32_X2APIC_START + (APIC_REG_VERSION >> 4)
+ rdmsr
+ shr eax, 16
+ cmp al, 5
+ jb htg_x2_notherm
+ mov ecx, MSR_IA32_X2APIC_START + (APIC_REG_LVT_THMR >> 4)
+ rdmsr
+ mov ebx, eax
+ and ebx, (APIC_REG_LVT_MASKED | APIC_REG_LVT_MODE_MASK)
+ cmp ebx, APIC_REG_LVT_MODE_NMI
+ jne htg_x2_notherm
+ or edi, 0x08
+ or eax, APIC_REG_LVT_MASKED
+ wrmsr
+htg_x2_notherm:
+ mov rdx, r15
+ mov [rdx + r8 + CPUMCPU.fApicDisVectors], edi
+htg_apic_done:
+
%endif ; VBOX_WITH_VMMR0_DISABLE_LAPIC_NMI
FIXUP FIX_NO_SYSENTER_JMP, 0, htg_no_sysenter - NAME(Start) ; this will insert a jmp htg_no_sysenter if host doesn't use sysenter.
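
Before touching the thermal LVT, the x2APIC path reads the APIC version register and checks that the Max LVT Entry field (bits 16-23) is at least 5, since the thermal sensor entry is LVT number 5 and older APICs do not implement it. A sketch of that test:

    /* Thermal-LVT presence test; MSR 0x803 (version register) and the field
     * layout are per the Intel SDM, rdmsr() stands in for a real MSR read. */
    #include <stdbool.h>
    #include <stdint.h>

    extern uint64_t rdmsr(uint32_t idMsr);              /* hypothetical accessor */

    static bool hasThermalLvt(void)
    {
        uint32_t uVersion = (uint32_t)rdmsr(0x803);     /* x2APIC version register */
        uint32_t iMaxLvt  = (uVersion >> 16) & 0xff;    /* Max LVT Entry field */
        return iMaxLvt >= 5;                            /* thermal sensor LVT is entry 5 */
    }
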
@@ -354,9 +455,8 @@ htg_no_sysenter:
mov [rdx + r8 + CPUMCPU.fUseFlags], esi
; debug registers.
- test esi, CPUM_USE_DEBUG_REGS | CPUM_USE_DEBUG_REGS_HOST
- jz htg_debug_regs_no
- jmp htg_debug_regs_save
+ test esi, CPUM_USE_DEBUG_REGS_HYPER | CPUM_USE_DEBUG_REGS_HOST
+ jnz htg_debug_regs_save
htg_debug_regs_no:
DEBUG_CHAR('a') ; trashes esi
@@ -438,13 +538,16 @@ htg_debug_regs_save:
DEBUG_S_CHAR('s');
mov rax, dr7 ; not sure, but if I read the docs right this will trap if GD is set. FIXME!!!
mov [rdx + r8 + CPUMCPU.Host.dr7], rax
- xor eax, eax ; clear everything. (bit 12? is read as 1...)
- mov dr7, rax
+ mov ecx, X86_DR7_INIT_VAL
+ cmp eax, ecx
+ je .htg_debug_regs_dr7_disabled
+ mov dr7, rcx
+.htg_debug_regs_dr7_disabled:
mov rax, dr6 ; just in case we save the state register too.
mov [rdx + r8 + CPUMCPU.Host.dr6], rax
; save host DR0-3?
- test esi, CPUM_USE_DEBUG_REGS
- jz near htg_debug_regs_no
+ test esi, CPUM_USE_DEBUG_REGS_HYPER
+ jz htg_debug_regs_no
DEBUG_S_CHAR('S');
mov rax, dr0
mov [rdx + r8 + CPUMCPU.Host.dr0], rax
@@ -454,6 +557,7 @@ DEBUG_S_CHAR('S');
mov [rdx + r8 + CPUMCPU.Host.dr2], rcx
mov rax, dr3
mov [rdx + r8 + CPUMCPU.Host.dr3], rax
+ or dword [rdx + r8 + CPUMCPU.fUseFlags], CPUM_USED_DEBUG_REGS_HOST
jmp htg_debug_regs_no
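
The rewritten save path no longer clears DR7 unconditionally: it compares the current value against X86_DR7_INIT_VAL and only loads the register when something is actually armed, presumably to avoid an unnecessary debug-register write. A rough C rendering of that step, assuming X86_DR7_INIT_VAL is the usual all-breakpoints-disabled value with only the must-be-one bit 10 set:

    /* Sketch of the DR7 handling in htg_debug_regs_save; 0x400 as the init
     * value and the accessor names are assumptions. */
    #include <stdint.h>

    #define DR7_INIT_VAL  UINT64_C(0x400)        /* all breakpoints off, RA1 bit 10 set */

    extern uint64_t readDr7(void);               /* hypothetical DR accessors */
    extern void     writeDr7(uint64_t uDr7);

    static uint64_t saveAndDisableDr7(void)
    {
        uint64_t uHostDr7 = readDr7();           /* saved into CPUMCPU.Host.dr7 by the asm */
        if (uHostDr7 != DR7_INIT_VAL)            /* skip the load if nothing is armed */
            writeDr7(DR7_INIT_VAL);
        return uHostDr7;
    }
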
@@ -512,12 +616,6 @@ just_a_jump:
ALIGNCODE(16)
GLOBALNAME JmpGCTarget
DEBUG_CHAR('-')
-;mov eax, 0ffff0000h
-;.delay_loop:
-;nop
-;dec eax
-;nop
-;jnz .delay_loop
; load final cr3 and do far jump to load cs.
mov cr3, ebp ; ebp set above
DEBUG_CHAR('0')
@@ -564,7 +662,7 @@ GLOBALNAME JmpGCTarget
mov esi, [edx + CPUMCPU.fUseFlags]
; debug registers
- test esi, CPUM_USE_DEBUG_REGS
+ test esi, CPUM_USE_DEBUG_REGS_HYPER
jnz htg_debug_regs_guest
htg_debug_regs_guest_done:
DEBUG_S_CHAR('9')
@@ -621,11 +719,11 @@ htg_debug_regs_guest:
mov dr2, eax
mov ebx, [edx + CPUMCPU.Hyper.dr + 8*3]
mov dr3, ebx
- ;mov eax, [edx + CPUMCPU.Hyper.dr + 8*6]
- mov ecx, 0ffff0ff0h
+ mov ecx, X86_DR6_INIT_VAL
mov dr6, ecx
mov eax, [edx + CPUMCPU.Hyper.dr + 8*7]
mov dr7, eax
+ or dword [edx + CPUMCPU.fUseFlags], CPUM_USED_DEBUG_REGS_HYPER
jmp htg_debug_regs_guest_done
ENDPROC vmmR0ToRawModeAsm
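
When the hypervisor context wants its own breakpoints, this path loads Hyper.dr0-dr3, resets DR6 to its architectural init value (the 0ffff0ff0h literal now spelled X86_DR6_INIT_VAL) so no stale status bits survive, loads DR7 last, and sets CPUM_USED_DEBUG_REGS_HYPER so the exit path knows the registers need restoring. In outline:

    /* Outline of htg_debug_regs_guest; the array layout and writeDr() are
     * illustrative only, the DR6 value matches the literal replaced above. */
    #include <stdint.h>

    #define DR6_INIT_VAL  UINT32_C(0xffff0ff0)   /* status bits clear, reserved bits set */

    extern void writeDr(unsigned iReg, uint32_t uValue);   /* hypothetical accessor */

    static void loadHyperDebugRegs(const uint32_t *pauHyperDr /* dr[0..7] */)
    {
        writeDr(0, pauHyperDr[0]);
        writeDr(1, pauHyperDr[1]);
        writeDr(2, pauHyperDr[2]);
        writeDr(3, pauHyperDr[3]);
        writeDr(6, DR6_INIT_VAL);                /* no stale B0-B3/BD/BS/BT bits */
        writeDr(7, pauHyperDr[7]);               /* enable last */
    }
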
@@ -786,17 +884,24 @@ vmmRCToHostAsm_SaveNoGeneralRegs:
cli
.if_clear_out:
%endif
+ mov edi, eax ; save return code in EDI (careful with COM_DWORD_REG from here on!)
+
; str [edx + CPUMCPU.Hyper.tr] - double fault only, and it won't be right then either.
sldt [edx + CPUMCPU.Hyper.ldtr.Sel]
; No need to save CRx here. They are set dynamically according to Guest/Host requirements.
; FPU context is saved before restore of host saving (another) branch.
+ ; Disable debug registers if active so they cannot trigger while switching.
+ test dword [edx + CPUMCPU.fUseFlags], CPUM_USED_DEBUG_REGS_HYPER
+ jz .gth_disabled_dr7
+ mov eax, X86_DR7_INIT_VAL
+ mov dr7, eax
+.gth_disabled_dr7:
;;
;; Load Intermediate memory context.
;;
- mov edi, eax ; save return code in EDI (careful with COM_DWORD_REG from here on!)
FIXUP SWITCHER_FIX_INTER_CR3_GC, 1
mov eax, 0ffffffffh
mov cr3, eax
@@ -990,13 +1095,6 @@ gth_restored_cr4:
;mov rcx, [rdx + r8 + CPUMCPU.Host.cr2] ; assumes this is waste of time.
;mov cr2, rcx
- ; restore debug registers (if modified) (esi must still be fUseFlags!)
- ; (must be done after cr4 reload because of the debug extension.)
- test esi, CPUM_USE_DEBUG_REGS | CPUM_USE_DEBUG_REGS_HOST
- jz short gth_debug_regs_no
- jmp gth_debug_regs_restore
-gth_debug_regs_no:
-
; Restore MSRs
mov rbx, rdx
mov ecx, MSR_K8_FS_BASE
@@ -1013,7 +1111,13 @@ gth_debug_regs_no:
wrmsr
mov rdx, rbx
- ; restore general registers.
+ ; Restore debug registers (if modified). (ESI must still be fUseFlags! Must be done late, at least after CR4!)
+ test esi, CPUM_USE_DEBUG_REGS_HOST | CPUM_USED_DEBUG_REGS_HOST | CPUM_USE_DEBUG_REGS_HYPER
+ jnz gth_debug_regs_restore
+gth_debug_regs_done:
+ and dword [rdx + r8 + CPUMCPU.fUseFlags], ~(CPUM_USED_DEBUG_REGS_HOST | CPUM_USED_DEBUG_REGS_HYPER)
+
+ ; Restore general registers.
mov eax, edi ; restore return code. eax = return code !!
; mov rax, [rdx + r8 + CPUMCPU.Host.rax] - scratch + return code
mov rbx, [rdx + r8 + CPUMCPU.Host.rbx]
@@ -1048,10 +1152,15 @@ gth_debug_regs_no:
; edx and edi must be preserved.
gth_debug_regs_restore:
DEBUG_S_CHAR('d')
- xor eax, eax
- mov dr7, rax ; paranoia or not?
- test esi, CPUM_USE_DEBUG_REGS
- jz short gth_debug_regs_dr7
+ mov rax, dr7 ; Some DR7 paranoia first...
+ mov ecx, X86_DR7_INIT_VAL
+ cmp rax, rcx
+ je .gth_debug_skip_dr7_disabling
+ mov dr7, rcx
+.gth_debug_skip_dr7_disabling:
+ test esi, CPUM_USED_DEBUG_REGS_HOST
+ jz .gth_debug_regs_dr7
+
DEBUG_S_CHAR('r')
mov rax, [rdx + r8 + CPUMCPU.Host.dr0]
mov dr0, rax
@@ -1061,12 +1170,14 @@ gth_debug_regs_restore:
mov dr2, rcx
mov rax, [rdx + r8 + CPUMCPU.Host.dr3]
mov dr3, rax
-gth_debug_regs_dr7:
+.gth_debug_regs_dr7:
mov rbx, [rdx + r8 + CPUMCPU.Host.dr6]
mov dr6, rbx
mov rcx, [rdx + r8 + CPUMCPU.Host.dr7]
mov dr7, rcx
- jmp gth_debug_regs_no
+
+ ; We clear the USED flags in the main code path.
+ jmp gth_debug_regs_done
ENDPROC vmmRCToHostAsm
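
On the way back to the host, gth_debug_regs_restore first parks DR7 at the init value (unless it already holds it) so that reloading DR0-DR3 cannot trigger anything, restores DR0-DR3 only when the host's registers were actually swapped out (CPUM_USED_DEBUG_REGS_HOST), then reloads the saved DR6 and DR7; the USED flags are cleared back in the main path. An approximate C outline, with the same assumed init value and hypothetical accessors as above:

    /* Approximate outline of gth_debug_regs_restore. */
    #include <stdint.h>

    #define DR7_INIT_VAL  UINT64_C(0x400)                 /* assumed X86_DR7_INIT_VAL */

    typedef struct HOSTDBGSTATE { uint64_t dr0, dr1, dr2, dr3, dr6, dr7; } HOSTDBGSTATE;
    extern uint64_t readDr7(void);                        /* hypothetical DR accessors */
    extern void     writeDr(unsigned iReg, uint64_t uValue);

    static void restoreHostDebugRegs(const HOSTDBGSTATE *pHost, int fHostRegsSaved)
    {
        if (readDr7() != DR7_INIT_VAL)                    /* same DR7 paranoia as above */
            writeDr(7, DR7_INIT_VAL);
        if (fHostRegsSaved)                               /* CPUM_USED_DEBUG_REGS_HOST */
        {
            writeDr(0, pHost->dr0);
            writeDr(1, pHost->dr1);
            writeDr(2, pHost->dr2);
            writeDr(3, pHost->dr3);
        }
        writeDr(6, pHost->dr6);
        writeDr(7, pHost->dr7);                           /* re-arm the host's DR7 last */
    }
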