Changeset 72522 in vbox for trunk/src/VBox/VMM/VMMAll/TMAllCpu.cpp
- Timestamp: Jun 12, 2018 8:45:27 AM
- svn:sync-xref-src-repo-rev: 123009
- File: 1 edited
trunk/src/VBox/VMM/VMMAll/TMAllCpu.cpp
r69111 → r72522

@@ -22,9 +22,10 @@
 #define LOG_GROUP LOG_GROUP_TM
 #include <VBox/vmm/tm.h>
+#include <VBox/vmm/gim.h>
+#include <VBox/vmm/dbgf.h>
+#include <VBox/vmm/nem.h>
 #include <iprt/asm-amd64-x86.h> /* for SUPGetCpuHzFromGIP */
 #include "TMInternal.h"
 #include <VBox/vmm/vm.h>
-#include <VBox/vmm/gim.h>
-#include <VBox/vmm/dbgf.h>
 #include <VBox/sup.h>
 

@@ -82,9 +83,20 @@
     /** @todo Test that pausing and resuming doesn't cause lag! (I.e. that we're
      * unpaused before the virtual time and stopped after it. */
-    if (pVM->tm.s.enmTSCMode == TMTSCMODE_REAL_TSC_OFFSET)
-        pVCpu->tm.s.offTSCRawSrc = SUPReadTsc() - pVCpu->tm.s.u64TSC;
-    else
-        pVCpu->tm.s.offTSCRawSrc = tmCpuTickGetRawVirtual(pVM, false /* don't check for pending timers */)
-                                 - pVCpu->tm.s.u64TSC;
+    switch (pVM->tm.s.enmTSCMode)
+    {
+        case TMTSCMODE_REAL_TSC_OFFSET:
+            pVCpu->tm.s.offTSCRawSrc = SUPReadTsc() - pVCpu->tm.s.u64TSC;
+            break;
+        case TMTSCMODE_VIRT_TSC_EMULATED:
+        case TMTSCMODE_DYNAMIC:
+            pVCpu->tm.s.offTSCRawSrc = tmCpuTickGetRawVirtual(pVM, false /* don't check for pending timers */)
+                                     - pVCpu->tm.s.u64TSC;
+            break;
+        case TMTSCMODE_NATIVE_API:
+            pVCpu->tm.s.offTSCRawSrc = 0; /** @todo ?? */
+            break;
+        default:
+            AssertFailedReturn(VERR_IPE_NOT_REACHED_DEFAULT_CASE);
+    }
     return VINF_SUCCESS;
 }

@@ -117,9 +129,20 @@
 
     /* When resuming, use the TSC value of the last stopped VCPU to avoid the TSC going back. */
-    if (pVM->tm.s.enmTSCMode == TMTSCMODE_REAL_TSC_OFFSET)
-        pVCpu->tm.s.offTSCRawSrc = SUPReadTsc() - pVM->tm.s.u64LastPausedTSC;
-    else
-        pVCpu->tm.s.offTSCRawSrc = tmCpuTickGetRawVirtual(pVM, false /* don't check for pending timers */)
-                                 - pVM->tm.s.u64LastPausedTSC;
+    switch (pVM->tm.s.enmTSCMode)
+    {
+        case TMTSCMODE_REAL_TSC_OFFSET:
+            pVCpu->tm.s.offTSCRawSrc = SUPReadTsc() - pVM->tm.s.u64LastPausedTSC;
+            break;
+        case TMTSCMODE_VIRT_TSC_EMULATED:
+        case TMTSCMODE_DYNAMIC:
+            pVCpu->tm.s.offTSCRawSrc = tmCpuTickGetRawVirtual(pVM, false /* don't check for pending timers */)
+                                     - pVM->tm.s.u64LastPausedTSC;
+            break;
+        case TMTSCMODE_NATIVE_API:
+            pVCpu->tm.s.offTSCRawSrc = 0; /** @todo ?? */
+            break;
+        default:
+            AssertFailedReturn(VERR_IPE_NOT_REACHED_DEFAULT_CASE);
+    }
 
     /* Calculate the offset for other VCPUs to use. */

@@ -413,8 +436,25 @@
     {
         PVM pVM = pVCpu->CTX_SUFF(pVM);
-        if (pVM->tm.s.enmTSCMode == TMTSCMODE_REAL_TSC_OFFSET)
-            u64 = SUPReadTsc();
-        else
-            u64 = tmCpuTickGetRawVirtual(pVM, fCheckTimers);
+        switch (pVM->tm.s.enmTSCMode)
+        {
+            case TMTSCMODE_REAL_TSC_OFFSET:
+                u64 = SUPReadTsc();
+                break;
+            case TMTSCMODE_VIRT_TSC_EMULATED:
+            case TMTSCMODE_DYNAMIC:
+                u64 = tmCpuTickGetRawVirtual(pVM, fCheckTimers);
+                break;
+#ifndef IN_RC
+            case TMTSCMODE_NATIVE_API:
+            {
+                u64 = 0;
+                int rcNem = NEMHCQueryCpuTick(pVCpu, &u64, NULL);
+                AssertLogRelRCReturn(rcNem, SUPReadTsc());
+                break;
+            }
+#endif
+            default:
+                AssertFailedBreakStmt(u64 = SUPReadTsc());
+        }
         u64 -= pVCpu->tm.s.offTSCRawSrc;
 
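The change repeated in all three function hunks is the same: the old two-way branch on pVM->tm.s.enmTSCMode == TMTSCMODE_REAL_TSC_OFFSET is replaced by a switch that names every TSC mode explicitly, so the TMTSCMODE_NATIVE_API mode (the TSC is queried from the native hypervisor API via NEM, hence the added <VBox/vmm/nem.h> include and the NEMHCQueryCpuTick call) gets its own arm, and an unexpected mode trips an assertion instead of silently falling into the virtual-clock branch. The standalone sketch below illustrates only that shape; it is a minimal sketch under stated assumptions, where readHostTsc(), readVirtualTsc() and queryNativeApiTsc() are hypothetical stand-ins for SUPReadTsc(), tmCpuTickGetRawVirtual() and NEMHCQueryCpuTick(), not VirtualBox APIs.

/* Minimal sketch of the "switch over the TSC mode" pattern used in r72522.
 * The helper functions are hypothetical stand-ins, not VirtualBox APIs. */
#include <stdint.h>
#include <stdio.h>
#include <assert.h>

typedef enum TSCMODE
{
    TSCMODE_REAL_TSC_OFFSET,    /* pass the host TSC through, offset per VCPU   */
    TSCMODE_VIRT_TSC_EMULATED,  /* derive the TSC from the virtual clock        */
    TSCMODE_DYNAMIC,            /* emulated, but may switch to real at runtime  */
    TSCMODE_NATIVE_API          /* ask the native hypervisor API (NEM) for it   */
} TSCMODE;

/* Stand-ins so the sketch is self-contained and compiles. */
static uint64_t readHostTsc(void)                { return 1000000; }
static uint64_t readVirtualTsc(void)             { return 999000;  }
static int      queryNativeApiTsc(uint64_t *pu)  { *pu = 1000500; return 0; /* 0 = success */ }

/* Raw TSC source for one guest CPU; mirrors the shape of the new code in the
 * last hunk: every mode has its own arm, and a failed native-API query or an
 * unknown mode falls back to the host TSC instead of silently picking a branch. */
static uint64_t tscGetRaw(TSCMODE enmMode)
{
    uint64_t u64;
    switch (enmMode)
    {
        case TSCMODE_REAL_TSC_OFFSET:
            u64 = readHostTsc();
            break;
        case TSCMODE_VIRT_TSC_EMULATED:
        case TSCMODE_DYNAMIC:
            u64 = readVirtualTsc();
            break;
        case TSCMODE_NATIVE_API:
            if (queryNativeApiTsc(&u64) != 0)
                u64 = readHostTsc();   /* fall back if the native API query fails */
            break;
        default:
            assert(!"unreachable TSC mode");
            u64 = readHostTsc();
            break;
    }
    return u64;
}

int main(void)
{
    uint64_t uOffset = 500; /* per-VCPU offset, analogous to offTSCRawSrc */
    printf("guest TSC = %llu\n",
           (unsigned long long)(tscGetRaw(TSCMODE_NATIVE_API) - uOffset));
    return 0;
}

Note that in the pause/resume hunks the TMTSCMODE_NATIVE_API arm simply zeroes offTSCRawSrc and carries a /** @todo ?? */, so the offset handling for that mode was still an open question at this revision; the include hunk only moves gim.h and dbgf.h earlier and adds nem.h, which is what makes NEMHCQueryCpuTick visible to this file.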