VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/PGMAllGst-armv8.cpp.h@ 108994

Last change on this file since 108994 was 108950, checked in by vboxsync, 4 weeks ago

VMM/PGM: Prepare for 5 level paging (which has a -1 start level) and 52-bit output addresses, bugref:10388

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 45.4 KB
Line 
1/* $Id: PGMAllGst-armv8.cpp.h 108950 2025-04-11 13:04:57Z vboxsync $ */
2/** @file
3 * PGM - Page Manager, ARMv8 Guest Paging Template - All context code.
4 */
5
6/*
7 * Copyright (C) 2023-2024 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*
30 *
31 * Mode criteria:
32 * - MMU enabled/disabled.
33 * - TCR_EL1.TG0 (granule size for TTBR0_EL1).
34 * - TCR_EL1.TG1 (granule size for TTBR1_EL1).
35 * - TCR_EL1.T0SZ (address space size for TTBR0_EL1).
36 * - TCR_EL1.T1SZ (address space size for TTBR1_EL1).
37 * - TCR_EL1.IPS (intermediate physical address size).
38 * - TCR_EL1.TBI0 (ignore top address byte for TTBR0_EL1).
39 * - TCR_EL1.TBI1 (ignore top address byte for TTBR1_EL1).
 * - TCR_EL1.HPD0 (hierarchical permission disables for TTBR0_EL1).
 * - TCR_EL1.HPD1 (hierarchical permission disables for TTBR1_EL1).
42 * - More ?
43 *
44 * Other relevant modifiers:
45 * - TCR_EL1.HA - hardware access bit.
46 * - TCR_EL1.HD - hardware dirty bit.
47 * - ++
48 *
49 * Each privilege EL (1,2,3) has their own TCR_ELx and TTBR[01]_ELx registers,
50 * so they should all have their own separate modes. To make it simpler,
51 * why not do a separate mode for TTBR0_ELx and one for TTBR1_ELx. Top-level
 * functions determine which of the roots to use and call template (C++)
53 * functions that takes it from there. Using the preprocessor function template
54 * approach is _not_ desirable here.
55 *
56 */
57
58
/*
 * Common helpers.
 */
64
65DECLINLINE(int) pgmGstWalkReturnNotPresent(PVMCPUCC pVCpu, PPGMPTWALK pWalk, uint8_t uLevel)
66{
67 NOREF(pVCpu);
68 pWalk->fNotPresent = true;
69 pWalk->uLevel = uLevel;
70 pWalk->fFailed = PGM_WALKFAIL_NOT_PRESENT
71 | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
72 return VERR_PAGE_TABLE_NOT_PRESENT;
73}
74
75DECLINLINE(int) pgmGstWalkReturnBadPhysAddr(PVMCPUCC pVCpu, PPGMPTWALK pWalk, uint8_t uLevel, int rc)
76{
77 AssertMsg(rc == VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS, ("%Rrc\n", rc)); NOREF(rc); NOREF(pVCpu);
78 pWalk->fBadPhysAddr = true;
79 pWalk->uLevel = uLevel;
80 pWalk->fFailed = PGM_WALKFAIL_BAD_PHYSICAL_ADDRESS
81 | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
82 return VERR_PAGE_TABLE_NOT_PRESENT;
83}
84
85
86DECLINLINE(int) pgmGstWalkReturnRsvdError(PVMCPUCC pVCpu, PPGMPTWALK pWalk, uint8_t uLevel)
87{
88 NOREF(pVCpu);
89 pWalk->fRsvdError = true;
90 pWalk->uLevel = uLevel;
91 pWalk->fFailed = PGM_WALKFAIL_RESERVED_BITS
92 | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
93 return VERR_PAGE_TABLE_NOT_PRESENT;
94}
95
96
97DECLINLINE(int) pgmGstWalkFastReturnNotPresent(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint8_t uLevel)
98{
99 RT_NOREF(pVCpu);
100 pWalk->fFailed = PGM_WALKFAIL_NOT_PRESENT | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
101 return VERR_PAGE_TABLE_NOT_PRESENT;
102}
103
104
105DECLINLINE(int) pgmGstWalkFastReturnBadPhysAddr(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint8_t uLevel, int rc)
106{
107 AssertMsg(rc == VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS, ("%Rrc\n", rc)); RT_NOREF(pVCpu, rc);
108 pWalk->fFailed = PGM_WALKFAIL_BAD_PHYSICAL_ADDRESS | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
109 return VERR_PGM_INVALID_GC_PHYSICAL_ADDRESS;
110}
111
112
113DECLINLINE(int) pgmGstWalkFastReturnRsvdError(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint8_t uLevel)
114{
115 RT_NOREF(pVCpu);
116 pWalk->fFailed = PGM_WALKFAIL_RESERVED_BITS | ((uint32_t)uLevel << PGM_WALKFAIL_LEVEL_SHIFT);
117 return VERR_RESERVED_PAGE_TABLE_BITS;
118}
119
120
/*
 * Special no paging variant.
 */
126
127static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneGetPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk)
128{
129 RT_NOREF(pVCpu);
130
131 RT_ZERO(*pWalk);
132 pWalk->fSucceeded = true;
133 pWalk->GCPtr = GCPtr;
134 pWalk->GCPhys = GCPtr;
135 pWalk->fEffective = PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_PGCS_MASK
136 | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK | PGM_PTATTRS_UX_MASK | PGM_PTATTRS_UGCS_MASK;
137 return VINF_SUCCESS;
138}
139
140
141static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneQueryPageFast)(PVMCPUCC pVCpu, RTGCPTR GCPtr, uint32_t fFlags, PPGMPTWALKFAST pWalk)
142{
143 RT_NOREF(pVCpu, fFlags);
144
145 pWalk->GCPtr = GCPtr;
146 pWalk->GCPhys = GCPtr;
147 pWalk->GCPhysNested = 0;
148 pWalk->fInfo = PGM_WALKINFO_SUCCEEDED;
149 pWalk->fFailed = PGM_WALKFAIL_SUCCESS;
150 pWalk->fEffective = PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_PGCS_MASK
151 | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK | PGM_PTATTRS_UX_MASK | PGM_PTATTRS_UGCS_MASK;
152 return VINF_SUCCESS;
153}
154
155
156static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneModifyPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, size_t cb, uint64_t fFlags, uint64_t fMask)
157{
158 /* Ignore. */
159 RT_NOREF(pVCpu, GCPtr, cb, fFlags, fMask);
160 return VINF_SUCCESS;
161}
162
163
164static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneWalk)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk, PPGMPTWALKGST pGstWalk)
165{
166 RT_NOREF(pVCpu, GCPtr, pWalk);
167 pGstWalk->enmType = PGMPTWALKGSTTYPE_INVALID;
168 return VERR_PGM_NOT_USED_IN_MODE;
169}
170
171
172static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneEnter)(PVMCPUCC pVCpu)
173{
174 /* Nothing to do. */
175 RT_NOREF(pVCpu);
176 return VINF_SUCCESS;
177}
178
179
180static PGM_CTX_DECL(int) PGM_CTX(pgm,GstNoneExit)(PVMCPUCC pVCpu)
181{
182 /* Nothing to do. */
183 RT_NOREF(pVCpu);
184 return VINF_SUCCESS;
185}
186
187
/*
 * Template variants for actual paging modes.
 */
/** @name Initial lookup level selectors for the walker templates.
 * Encoded with a +1 bias so that start level -1 (5-level paging, see the
 * bugref:10388 change note) fits an unsigned template parameter.
 * @{ */
#define PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_MINUS_ONE 0
#define PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ZERO 1
#define PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ONE 2
#define PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_TWO 3
#define PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_THREE 4
/** Paging configuration that cannot be walked (asserted against). */
#define PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_INVALID 5
/** @} */
199
200
/*
 * Descriptor flags to page table attribute flags mapping.
 * Indexed by AP[1] (bit 0), AP[2] (bit 1), PXN (bit 2) and UXN (bit 3);
 * see pgmGstWalkWorkerSetEffective() for the index construction.
 */
static const PGMPTATTRS s_aEffective[] =
{
    /* UXN PXN AP[2] AP[1] */
    /*  0   0    0    0   */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_UX_MASK,
    /*  0   0    0    1   */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_UX_MASK,
    /*  0   0    1    0   */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_UX_MASK,
    /*  0   0    1    1   */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_PX_MASK | PGM_PTATTRS_UX_MASK,

    /*  0   1    0    0   */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_UX_MASK,
    /*  0   1    0    1   */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK | PGM_PTATTRS_UX_MASK,
    /*  0   1    1    0   */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_UX_MASK,
    /*  0   1    1    1   */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UX_MASK,

    /*  1   0    0    0   */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_PX_MASK,
    /*  1   0    0    1   */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK | PGM_PTATTRS_PX_MASK,
    /*  1   0    1    0   */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PX_MASK,
    /*  1   0    1    1   */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_PX_MASK,

    /*  1   1    0    0   */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK,
    /*  1   1    0    1   */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_PW_MASK | PGM_PTATTRS_UR_MASK | PGM_PTATTRS_UW_MASK ,
    /*  1   1    1    0   */ PGM_PTATTRS_PR_MASK,
    /*  1   1    1    1   */ PGM_PTATTRS_PR_MASK | PGM_PTATTRS_UR_MASK,
};
227
228
229DECL_FORCE_INLINE(int) pgmGstWalkWorkerSetEffective(PPGMPTWALK pWalk, ARMV8VMSA64DESC Desc)
230{
231 uint32_t const idxPerm = RT_BF_GET(Desc, ARMV8_VMSA64_DESC_PG_OR_BLOCK_LATTR_AP)
232 | ((Desc & ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_PXN) >> ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_PXN_BIT) << 2
233 | ((Desc & ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_UXN) >> ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_UXN_BIT) << 3;
234
235 pWalk->fEffective = s_aEffective[idxPerm];
236 return VINF_SUCCESS;
237}
238
239
/**
 * Common worker for walking the guest translation tables for a virtual
 * address, instantiated once per translation regime configuration.
 *
 * @returns VBox status code.
 * @param   pVCpu       The cross context virtual CPU structure of the calling EMT.
 * @param   GCPtr       The guest virtual address to translate.
 * @param   pWalk       Where to store the walk result.
 * @param   pGstWalk    Extended walk information; currently unused (see @todo).
 *
 * @tparam  a_fTtbr0            Whether the TTBR0 (true) or TTBR1 (false) lookup
 *                              masks are used.
 * @tparam  a_InitialLookupLvl  PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_XXX value
 *                              selecting which levels are walked.
 * @tparam  a_GranuleSz         ARMV8_TCR_EL1_AARCH64_TG0_XXX granule size.
 * @tparam  a_fTbi              Top-byte-ignore flag (not used by this body).
 * @tparam  a_fEpd              Table-walk-disable flag (not used by this body).
 * @tparam  a_f52BitOa          52-bit output address flag (not used by this body).
 */
template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd, bool a_f52BitOa>
DECL_FORCE_INLINE(int) pgmGstWalkWorker(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk, PPGMPTWALKGST pGstWalk)
{
    RT_NOREF(pGstWalk); /** @todo */

    /* This also applies to TG1 granule sizes, as both share the same encoding in TCR. */
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_INVALID == ARMV8_TCR_EL1_AARCH64_TG1_INVALID);
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_16KB == ARMV8_TCR_EL1_AARCH64_TG1_16KB);
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_4KB == ARMV8_TCR_EL1_AARCH64_TG1_4KB);
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_64KB == ARMV8_TCR_EL1_AARCH64_TG1_64KB);

    if RT_CONSTEXPR_IF(   a_GranuleSz != ARMV8_TCR_EL1_AARCH64_TG0_INVALID
                       && a_InitialLookupLvl != PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_INVALID)
    {
        /* Granule-size dependent constants, resolved at compile time below. */
        uint64_t fLookupMaskFull;           /* Index mask for all levels after the initial one. */
        RTGCPTR offPageMask;                /* Offset-within-page mask for level 3 mappings. */

        RTGCPTR offLvl1BlockMask;           /* Offset mask for level 1 block mappings (0 = unsupported). */
        RTGCPTR offLvl2BlockMask;           /* Offset mask for level 2 block mappings. */

        uint64_t fNextTableOrPageMask;      /* Extracts the next table/page base address from a descriptor. */
        uint8_t cLvl0Shift;                 /* Address shift per level (0 = level not used). */
        uint8_t cLvl1Shift;
        uint8_t cLvl2Shift;
        uint8_t cLvl3Shift;

        RTGCPHYS fGCPhysLvl1BlockBase;      /* Extracts the block base address from a level 1 descriptor. */
        RTGCPHYS fGCPhysLvl2BlockBase;      /* Extracts the block base address from a level 2 descriptor. */

        /** @todo This needs to go into defines in armv8.h if final. */
        if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_4KB)
        {
            /* 4KiB granule: 512 entries/table, 1GiB level 1 and 2MiB level 2 blocks. */
            fLookupMaskFull = RT_BIT_64(9) - 1;
            offLvl1BlockMask = (RTGCPTR)(_1G - 1);
            offLvl2BlockMask = (RTGCPTR)(_2M - 1);
            offPageMask = (RTGCPTR)(_4K - 1);
            fNextTableOrPageMask = UINT64_C(0xfffffffff000);
            cLvl0Shift = 39;
            cLvl1Shift = 30;
            cLvl2Shift = 21;
            cLvl3Shift = 12;
            fGCPhysLvl1BlockBase = UINT64_C(0xffffc0000000);
            fGCPhysLvl2BlockBase = UINT64_C(0xffffffe00000);
        }
        else if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_16KB)
        {
            /* 16KiB granule: 2048 entries/table, 32MiB level 2 blocks, no level 1 blocks. */
            fLookupMaskFull = RT_BIT_64(11) - 1;
            offLvl1BlockMask = 0; /** @todo TCR_EL1.DS support. */
            offLvl2BlockMask = (RTGCPTR)(_32M - 1);
            offPageMask = (RTGCPTR)(_16K - 1);
            fNextTableOrPageMask = UINT64_C(0xffffffffc000);
            cLvl0Shift = 47;
            cLvl1Shift = 36;
            cLvl2Shift = 25;
            cLvl3Shift = 14;
            fGCPhysLvl1BlockBase = 0; /* Not supported. */
            fGCPhysLvl2BlockBase = UINT64_C(0xfffffe000000);
        }
        else if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_64KB)
        {
            /* 64KiB granule: 8192 entries/table, 512MiB level 2 blocks, lookup starts at level 1 or later. */
            Assert(a_InitialLookupLvl > 0);

            fLookupMaskFull = RT_BIT_64(13) - 1;
            offLvl1BlockMask = 0; /** @todo FEAT_LPA (RTGCPTR)(4*_1T - 1) */
            offLvl2BlockMask = (RTGCPTR)(_512M - 1);
            offPageMask = (RTGCPTR)(_64K - 1);
            fNextTableOrPageMask = UINT64_C(0xffffffff0000);
            cLvl0Shift = 0; /* No Level 0 with 64KiB granules. */
            cLvl1Shift = 42;
            cLvl2Shift = 29;
            cLvl3Shift = 16;
            fGCPhysLvl1BlockBase = 0; /* Not supported. */
            fGCPhysLvl2BlockBase = UINT64_C(0xffffe0000000);
        }

        pWalk->GCPtr = GCPtr;

        /* Get the initial lookup mask. */
        uint8_t const bEl = CPUMGetGuestEL(pVCpu);
        uint64_t fLookupMask;
        if RT_CONSTEXPR_IF(a_fTtbr0 == true)
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr0[bEl];
        else
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr1[bEl];

        RTGCPHYS GCPhysPt = CPUMGetEffectiveTtbr(pVCpu, GCPtr);
        PARMV8VMSA64DESC paDesc = NULL;
        ARMV8VMSA64DESC Desc;
        int rc;
        if RT_CONSTEXPR_IF(a_InitialLookupLvl == PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ZERO)
        {
            Assert(cLvl0Shift != 0);
            uint8_t const uLvl = 0;

            /* Map the level 0 table and read the entry for this address. */
            rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, (void **)&paDesc);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

            Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl0Shift) & fLookupMask]);
            if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
            else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, uLvl);

            /* Level 0 entries must reference a table, never a block. */
            if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
            else return pgmGstWalkReturnRsvdError(pVCpu, pWalk, uLvl); /** @todo Only supported if TCR_EL1.DS is set. */

            /* Full lookup mask from now on. */
            fLookupMask = fLookupMaskFull;
            GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ONE)
        {
            uint8_t const uLvl = 1;

            rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, (void **)&paDesc);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

            Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl1Shift) & fLookupMask]);
            if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
            else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, uLvl);

            if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
            else
            {
                /* Level 1 block mappings are only valid for granule sizes that support them. */
                if (offLvl1BlockMask != 0)
                {
                    /* Block descriptor. */
                    pWalk->fSucceeded = true;
                    pWalk->fGigantPage = true;
                    pWalk->GCPhys = (RTGCPHYS)(Desc & fGCPhysLvl1BlockBase) | (GCPtr & offLvl1BlockMask);
                    return pgmGstWalkWorkerSetEffective(pWalk, Desc);
                }
                else
                    return pgmGstWalkReturnRsvdError(pVCpu, pWalk, uLvl);
            }

            /* Full lookup mask from now on. */
            fLookupMask = fLookupMaskFull;
            GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_TWO)
        {
            uint8_t const uLvl = 2;

            rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, (void **)&paDesc);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

            Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl2Shift) & fLookupMask]);
            if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
            else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, uLvl);

            if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
            else
            {
                /* Block descriptor. */
                pWalk->fSucceeded = true;
                pWalk->fBigPage = true;
                pWalk->GCPhys = (RTGCPHYS)(Desc & fGCPhysLvl2BlockBase) | (GCPtr & offLvl2BlockMask);
                return pgmGstWalkWorkerSetEffective(pWalk, Desc);
            }

            /* Full lookup mask from now on. */
            fLookupMask = fLookupMaskFull;
            GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
        }

        AssertCompile(a_InitialLookupLvl <= PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_THREE);
        uint8_t const uLvl = 3;

        /* Next level. */
        rc = PGM_GCPHYS_2_PTR_BY_VMCPU(pVCpu, GCPhysPt, (void **)&paDesc);
        if (RT_SUCCESS(rc)) { /* probable */ }
        else return pgmGstWalkReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

        Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl3Shift) & fLookupMask]);
        if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
        else return pgmGstWalkReturnNotPresent(pVCpu, pWalk, uLvl);

        if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
        else return pgmGstWalkReturnRsvdError(pVCpu, pWalk, uLvl); /* No block descriptors. */

        /* Level 3 page descriptor: combine the page base with the in-page offset. */
        pWalk->fSucceeded = true;
        pWalk->GCPhys = (RTGCPHYS)(Desc & fNextTableOrPageMask) | (GCPtr & offPageMask);
        return pgmGstWalkWorkerSetEffective(pWalk, Desc);
    }
    else
        AssertReleaseFailedReturn(VERR_PGM_MODE_IPE);
}
431
432
433template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd, bool a_f52BitOa>
434static PGM_CTX_DECL(int) PGM_CTX(pgm,GstGetPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk)
435{
436 return pgmGstWalkWorker<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa>(pVCpu, GCPtr, pWalk, NULL /*pGstWalk*/);
437}
438
439
/**
 * Walk failure table for privileged read access, indexed by the descriptor's
 * AP[1]/AP[2]/PXN/UXN bits (same index construction as s_aEffective).
 * Privileged reads always succeed for valid mappings.
 */
static const PGMWALKFAIL g_aPermPrivRead[] =
{
    /* UXN PXN AP[2] AP[1] */
    /*  0   0    0    0   */ PGM_WALKFAIL_SUCCESS,
    /*  0   0    0    1   */ PGM_WALKFAIL_SUCCESS,
    /*  0   0    1    0   */ PGM_WALKFAIL_SUCCESS,
    /*  0   0    1    1   */ PGM_WALKFAIL_SUCCESS,
    /*  0   1    0    0   */ PGM_WALKFAIL_SUCCESS,
    /*  0   1    0    1   */ PGM_WALKFAIL_SUCCESS,
    /*  0   1    1    0   */ PGM_WALKFAIL_SUCCESS,
    /*  0   1    1    1   */ PGM_WALKFAIL_SUCCESS,
    /*  1   0    0    0   */ PGM_WALKFAIL_SUCCESS,
    /*  1   0    0    1   */ PGM_WALKFAIL_SUCCESS,
    /*  1   0    1    0   */ PGM_WALKFAIL_SUCCESS,
    /*  1   0    1    1   */ PGM_WALKFAIL_SUCCESS,
    /*  1   1    0    0   */ PGM_WALKFAIL_SUCCESS,
    /*  1   1    0    1   */ PGM_WALKFAIL_SUCCESS,
    /*  1   1    1    0   */ PGM_WALKFAIL_SUCCESS,
    /*  1   1    1    1   */ PGM_WALKFAIL_SUCCESS
};
460
461
/**
 * Walk failure table for privileged write access, indexed by the descriptor's
 * AP[1]/AP[2]/PXN/UXN bits (same index construction as s_aEffective).
 * AP[2]=1 marks the mapping read-only for the privileged level.
 */
static const PGMWALKFAIL g_aPermPrivWrite[] =
{
    /* UXN PXN AP[2] AP[1] */
    /*  0   0    0    0   */ PGM_WALKFAIL_SUCCESS,
    /*  0   0    0    1   */ PGM_WALKFAIL_SUCCESS,
    /*  0   0    1    0   */ PGM_WALKFAIL_NOT_WRITABLE,
    /*  0   0    1    1   */ PGM_WALKFAIL_NOT_WRITABLE,
    /*  0   1    0    0   */ PGM_WALKFAIL_SUCCESS,
    /*  0   1    0    1   */ PGM_WALKFAIL_SUCCESS,
    /*  0   1    1    0   */ PGM_WALKFAIL_NOT_WRITABLE,
    /*  0   1    1    1   */ PGM_WALKFAIL_NOT_WRITABLE,
    /*  1   0    0    0   */ PGM_WALKFAIL_SUCCESS,
    /*  1   0    0    1   */ PGM_WALKFAIL_SUCCESS,
    /*  1   0    1    0   */ PGM_WALKFAIL_NOT_WRITABLE,
    /*  1   0    1    1   */ PGM_WALKFAIL_NOT_WRITABLE,
    /*  1   1    0    0   */ PGM_WALKFAIL_SUCCESS,
    /*  1   1    0    1   */ PGM_WALKFAIL_SUCCESS,
    /*  1   1    1    0   */ PGM_WALKFAIL_NOT_WRITABLE,
    /*  1   1    1    1   */ PGM_WALKFAIL_NOT_WRITABLE
};
482
483
/**
 * Walk failure table for privileged execute access, indexed by the
 * descriptor's AP[1]/AP[2]/PXN/UXN bits (same index construction as
 * s_aEffective).  PXN=1 forbids privileged execution.
 */
static const PGMWALKFAIL g_aPermPrivExec[] =
{
    /* UXN PXN AP[2] AP[1] */
    /*  0   0    0    0   */ PGM_WALKFAIL_SUCCESS,
    /*  0   0    0    1   */ PGM_WALKFAIL_SUCCESS,
    /*  0   0    1    0   */ PGM_WALKFAIL_SUCCESS,
    /*  0   0    1    1   */ PGM_WALKFAIL_SUCCESS,
    /*  0   1    0    0   */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /*  0   1    0    1   */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /*  0   1    1    0   */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /*  0   1    1    1   */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /*  1   0    0    0   */ PGM_WALKFAIL_SUCCESS,
    /*  1   0    0    1   */ PGM_WALKFAIL_SUCCESS,
    /*  1   0    1    0   */ PGM_WALKFAIL_SUCCESS,
    /*  1   0    1    1   */ PGM_WALKFAIL_SUCCESS,
    /*  1   1    0    0   */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /*  1   1    0    1   */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /*  1   1    1    0   */ PGM_WALKFAIL_NOT_EXECUTABLE,
    /*  1   1    1    1   */ PGM_WALKFAIL_NOT_EXECUTABLE
};
504
505
/**
 * Walk failure table for unprivileged read access, indexed by the
 * descriptor's AP[1]/AP[2]/PXN/UXN bits (same index construction as
 * s_aEffective).  AP[1]=1 grants unprivileged access.
 */
static const PGMWALKFAIL g_aPermUnprivRead[] =
{
    /* UXN PXN AP[2] AP[1] */
    /*  0   0    0    0   */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /*  0   0    0    1   */ PGM_WALKFAIL_SUCCESS,
    /*  0   0    1    0   */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /*  0   0    1    1   */ PGM_WALKFAIL_SUCCESS,
    /*  0   1    0    0   */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /*  0   1    0    1   */ PGM_WALKFAIL_SUCCESS,
    /*  0   1    1    0   */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /*  0   1    1    1   */ PGM_WALKFAIL_SUCCESS,
    /*  1   0    0    0   */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /*  1   0    0    1   */ PGM_WALKFAIL_SUCCESS,
    /*  1   0    1    0   */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /*  1   0    1    1   */ PGM_WALKFAIL_SUCCESS,
    /*  1   1    0    0   */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /*  1   1    0    1   */ PGM_WALKFAIL_SUCCESS,
    /*  1   1    1    0   */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
    /*  1   1    1    1   */ PGM_WALKFAIL_SUCCESS
};
526
527
528static const PGMWALKFAIL g_aPermUnprivWrite[] =
529{
530 /* UXN PXN AP[2] AP[1] */
531 /* 0 0 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
532 /* 0 0 0 1 */ PGM_WALKFAIL_SUCCESS,
533 /* 0 0 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_WRITABLE,
534 /* 0 0 1 1 */ PGM_WALKFAIL_NOT_WRITABLE,
535 /* 0 1 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
536 /* 0 1 0 1 */ PGM_WALKFAIL_SUCCESS,
537 /* 0 1 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_WRITABLE,
538 /* 0 1 1 1 */ PGM_WALKFAIL_NOT_WRITABLE,
539 /* 1 0 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
540 /* 1 0 0 1 */ PGM_WALKFAIL_SUCCESS,
541 /* 1 0 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_WRITABLE,
542 /* 1 0 1 1 */ PGM_WALKFAIL_NOT_WRITABLE,
543 /* 1 1 0 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE,
544 /* 1 1 0 1 */ PGM_WALKFAIL_SUCCESS,
545 /* 1 1 1 0 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_WRITABLE,
546 /* 1 1 1 1 */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE
547};
548
549
/**
 * Walk failure table for unprivileged execute access, indexed by the
 * descriptor's AP[1]/AP[2]/PXN/UXN bits (same index construction as
 * s_aEffective).  UXN=1 forbids unprivileged execution.
 */
static const PGMWALKFAIL g_aPermUnprivExec[] =
{
    /* UXN PXN AP[2] AP[1] */
    /*  0   0    0    0   */ PGM_WALKFAIL_SUCCESS,
    /*  0   0    0    1   */ PGM_WALKFAIL_SUCCESS,
    /*  0   0    1    0   */ PGM_WALKFAIL_SUCCESS,
    /*  0   0    1    1   */ PGM_WALKFAIL_SUCCESS,
    /*  0   1    0    0   */ PGM_WALKFAIL_SUCCESS,
    /*  0   1    0    1   */ PGM_WALKFAIL_SUCCESS,
    /*  0   1    1    0   */ PGM_WALKFAIL_SUCCESS,
    /*  0   1    1    1   */ PGM_WALKFAIL_SUCCESS,
    /*  1   0    0    0   */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /*  1   0    0    1   */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /*  1   0    1    0   */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /*  1   0    1    1   */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /*  1   1    0    0   */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /*  1   1    0    1   */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /*  1   1    1    0   */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE,
    /*  1   1    1    1   */ PGM_WALKFAIL_NOT_ACCESSIBLE_BY_MODE | PGM_WALKFAIL_NOT_EXECUTABLE
};
570
571
/**
 * Checks the requested access against the permissions encoded in the page or
 * block descriptor and updates the fast walk result accordingly.
 *
 * @returns VBox status code.
 * @retval  VINF_SUCCESS if the access is permitted (PGM_WALKINFO_SUCCEEDED set).
 * @retval  VERR_ACCESS_DENIED if denied (pWalk->fFailed updated with the level).
 * @retval  VERR_PGM_MODE_IPE for invalid access flag combinations.
 * @param   pWalk   The fast walk result to update.
 * @param   Desc    The page or block descriptor being checked.
 * @param   fFlags  PGMQPAGE_F_XXX flags describing the requested access.
 * @param   uLvl    The table level, recorded in fFailed on denial.
 */
DECL_FORCE_INLINE(int) pgmGstQueryPageCheckPermissions(PPGMPTWALKFAST pWalk, ARMV8VMSA64DESC Desc, uint32_t fFlags, uint8_t uLvl)
{
    Assert(!(fFlags & ~PGMQPAGE_F_VALID_MASK));

    /* Select the permission table for the requested access kind (U=user, X=execute, W=write, R=read). */
    static const uint32_t *s_apaPerm[] =
    {
        /* U X W R */
        /* 0 0 0 0 */ &g_aPermPrivRead[0], /* Don't check or modify anything, this translates to a privileged read */
        /* 0 0 0 1 */ &g_aPermPrivRead[0], /* Privileged read access */
        /* 0 0 1 0 */ &g_aPermPrivWrite[0], /* Privileged write access */
        /* 0 0 1 1 */ NULL, /* Invalid access flags */
        /* 0 1 0 0 */ &g_aPermPrivExec[0], /* Privileged execute access */
        /* 0 1 0 1 */ NULL, /* Invalid access flags */
        /* 0 1 1 0 */ NULL, /* Invalid access flags */
        /* 0 1 1 1 */ NULL, /* Invalid access flags */

        /* 1 0 0 0 */ NULL, /* Invalid access flags */
        /* 1 0 0 1 */ &g_aPermUnprivRead[0], /* Unprivileged read access */
        /* 1 0 1 0 */ &g_aPermUnprivWrite[0], /* Unprivileged write access */
        /* 1 0 1 1 */ NULL, /* Invalid access flags */
        /* 1 1 0 0 */ &g_aPermUnprivExec[0], /* Unprivileged execute access */
        /* 1 1 0 1 */ NULL, /* Invalid access flags */
        /* 1 1 1 0 */ NULL, /* Invalid access flags */
        /* 1 1 1 1 */ NULL, /* Invalid access flags */
    };
    Assert(fFlags < RT_ELEMENTS(s_apaPerm));

    const uint32_t *paPerm = s_apaPerm[fFlags];
    AssertReturn(paPerm, VERR_PGM_MODE_IPE);

    /* Index into the permission table: AP[2:1] in bits 0-1, PXN in bit 2, UXN in bit 3
       (same construction as pgmGstWalkWorkerSetEffective). */
    uint32_t const idxPerm = RT_BF_GET(Desc, ARMV8_VMSA64_DESC_PG_OR_BLOCK_LATTR_AP)
                           | ((Desc & ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_PXN) >> ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_PXN_BIT) << 2
                           | ((Desc & ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_UXN) >> ARMV8_VMSA64_DESC_PG_OR_BLOCK_UATTR_2PRIV_UXN_BIT) << 3;

    pWalk->fEffective = s_aEffective[idxPerm];

    PGMWALKFAIL const fFailed = paPerm[idxPerm];
    if (fFailed == PGM_WALKFAIL_SUCCESS)
    {
        pWalk->fInfo |= PGM_WALKINFO_SUCCEEDED;
        return VINF_SUCCESS;
    }

    pWalk->fFailed = fFailed | (uLvl << PGM_WALKFAIL_LEVEL_SHIFT);
    return VERR_ACCESS_DENIED;
}
618
619
/**
 * Fast page query for the templated paging modes: walks the translation
 * tables lock-free and checks the requested access permissions along the way.
 *
 * @returns VBox status code.
 * @param   pVCpu   The cross context virtual CPU structure of the calling EMT.
 * @param   GCPtr   The guest virtual address to translate.
 * @param   fFlags  PGMQPAGE_F_XXX flags describing the requested access.
 * @param   pWalk   Where to store the fast walk result.
 *
 * @tparam  a_fTtbr0            Whether the TTBR0 (true) or TTBR1 (false) lookup
 *                              masks are used.
 * @tparam  a_InitialLookupLvl  PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_XXX value
 *                              selecting which levels are walked.
 * @tparam  a_GranuleSz         ARMV8_TCR_EL1_AARCH64_TG0_XXX granule size.
 * @tparam  a_fTbi              Top-byte-ignore flag (not used by this body).
 * @tparam  a_fEpd              Table-walk-disable flag (not used by this body).
 * @tparam  a_f52BitOa          52-bit output address flag (not used by this body).
 */
template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd, bool a_f52BitOa>
static PGM_CTX_DECL(int) PGM_CTX(pgm,GstQueryPageFast)(PVMCPUCC pVCpu, RTGCPTR GCPtr, uint32_t fFlags, PPGMPTWALKFAST pWalk)
{
    /* This also applies to TG1 granule sizes, as both share the same encoding in TCR. */
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_INVALID == ARMV8_TCR_EL1_AARCH64_TG1_INVALID);
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_16KB == ARMV8_TCR_EL1_AARCH64_TG1_16KB);
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_4KB == ARMV8_TCR_EL1_AARCH64_TG1_4KB);
    AssertCompile(ARMV8_TCR_EL1_AARCH64_TG0_64KB == ARMV8_TCR_EL1_AARCH64_TG1_64KB);

    pWalk->GCPtr = GCPtr;

    if RT_CONSTEXPR_IF(   a_GranuleSz != ARMV8_TCR_EL1_AARCH64_TG0_INVALID
                       && a_InitialLookupLvl != PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_INVALID)
    {
        /* Granule-size dependent constants; mirrors pgmGstWalkWorker(). */
        uint64_t fLookupMaskFull;           /* Index mask for all levels after the initial one. */
        RTGCPTR offPageMask;                /* Offset-within-page mask for level 3 mappings. */

        RTGCPTR offLvl1BlockMask;           /* Offset mask for level 1 block mappings (0 = unsupported). */
        RTGCPTR offLvl2BlockMask;           /* Offset mask for level 2 block mappings. */

        uint64_t fNextTableOrPageMask;      /* Extracts the next table/page base address from a descriptor. */
        uint8_t cLvl0Shift;                 /* Address shift per level (0 = level not used). */
        uint8_t cLvl1Shift;
        uint8_t cLvl2Shift;
        uint8_t cLvl3Shift;

        RTGCPHYS fGCPhysLvl1BlockBase;      /* Extracts the block base address from a level 1 descriptor. */
        RTGCPHYS fGCPhysLvl2BlockBase;      /* Extracts the block base address from a level 2 descriptor. */

        /** @todo This needs to go into defines in armv8.h if final. */
        if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_4KB)
        {
            /* 4KiB granule: 512 entries/table, 1GiB level 1 and 2MiB level 2 blocks. */
            fLookupMaskFull = RT_BIT_64(9) - 1;
            offLvl1BlockMask = (RTGCPTR)(_1G - 1);
            offLvl2BlockMask = (RTGCPTR)(_2M - 1);
            offPageMask = (RTGCPTR)(_4K - 1);
            fNextTableOrPageMask = UINT64_C(0xfffffffff000);
            cLvl0Shift = 39;
            cLvl1Shift = 30;
            cLvl2Shift = 21;
            cLvl3Shift = 12;
            fGCPhysLvl1BlockBase = UINT64_C(0xffffc0000000);
            fGCPhysLvl2BlockBase = UINT64_C(0xffffffe00000);
        }
        else if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_16KB)
        {
            /* 16KiB granule: 2048 entries/table, 32MiB level 2 blocks, no level 1 blocks. */
            fLookupMaskFull = RT_BIT_64(11) - 1;
            offLvl1BlockMask = 0; /** @todo TCR_EL1.DS support. */
            offLvl2BlockMask = (RTGCPTR)(_32M - 1);
            offPageMask = (RTGCPTR)(_16K - 1);
            fNextTableOrPageMask = UINT64_C(0xffffffffc000);
            cLvl0Shift = 47;
            cLvl1Shift = 36;
            cLvl2Shift = 25;
            cLvl3Shift = 14;
            fGCPhysLvl1BlockBase = 0; /* Not supported. */
            fGCPhysLvl2BlockBase = UINT64_C(0xfffffe000000);
        }
        else if RT_CONSTEXPR_IF(a_GranuleSz == ARMV8_TCR_EL1_AARCH64_TG0_64KB)
        {
            /* 64KiB granule: 8192 entries/table, 512MiB level 2 blocks, lookup starts at level 1 or later. */
            Assert(a_InitialLookupLvl > 0);

            fLookupMaskFull = RT_BIT_64(13) - 1;
            offLvl1BlockMask = 0; /** @todo FEAT_LPA (RTGCPTR)(4*_1T - 1) */
            offLvl2BlockMask = (RTGCPTR)(_512M - 1);
            offPageMask = (RTGCPTR)(_64K - 1);
            fNextTableOrPageMask = UINT64_C(0xffffffff0000);
            cLvl0Shift = 0; /* No Level 0 with 64KiB granules. */
            cLvl1Shift = 42;
            cLvl2Shift = 29;
            cLvl3Shift = 16;
            fGCPhysLvl1BlockBase = 0; /* Not supported. */
            fGCPhysLvl2BlockBase = UINT64_C(0xffffe0000000);
        }

        /* Get the initial lookup mask. */
        uint8_t const bEl = (fFlags & PGMQPAGE_F_USER_MODE) ? 0 : 1; /** @todo EL2 support */
        uint64_t fLookupMask;
        if RT_CONSTEXPR_IF(a_fTtbr0 == true)
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr0[bEl];
        else
            fLookupMask = pVCpu->pgm.s.afLookupMaskTtbr1[bEl];

        RTGCPHYS GCPhysPt = CPUMGetEffectiveTtbr(pVCpu, GCPtr);
        PARMV8VMSA64DESC paDesc = NULL;
        ARMV8VMSA64DESC Desc;
        int rc;
        if RT_CONSTEXPR_IF(a_InitialLookupLvl == PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ZERO)
        {
            Assert(cLvl0Shift != 0);
            uint8_t const uLvl = 0;

            /* Map the level 0 table lock-free and read the entry for this address. */
            rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

            Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl0Shift) & fLookupMask]);
            if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
            else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);

            /* Level 0 entries must reference a table, never a block. */
            if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
            else return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, uLvl); /** @todo Only supported if TCR_EL1.DS is set. */

            /* Full lookup mask from now on. */
            fLookupMask = fLookupMaskFull;
            GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ONE)
        {
            uint8_t const uLvl = 1;

            rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

            Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl1Shift) & fLookupMask]);
            if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
            else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);

            if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
            else
            {
                /* Level 1 block mappings are only valid for granule sizes that support them. */
                if (offLvl1BlockMask != 0)
                {
                    /* Block descriptor. */
                    pWalk->fInfo = PGM_WALKINFO_GIGANTIC_PAGE;
                    pWalk->GCPhys = (RTGCPHYS)(Desc & fGCPhysLvl1BlockBase) | (GCPtr & offLvl1BlockMask);
                    return pgmGstQueryPageCheckPermissions(pWalk, Desc, fFlags, uLvl);
                }
                else
                    return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, uLvl);
            }

            /* Full lookup mask from now on. */
            fLookupMask = fLookupMaskFull;
            GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
        }

        if RT_CONSTEXPR_IF(a_InitialLookupLvl <= PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_TWO)
        {
            uint8_t const uLvl = 2;

            rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
            if (RT_SUCCESS(rc)) { /* probable */ }
            else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

            Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl2Shift) & fLookupMask]);
            if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
            else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);

            if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
            else
            {
                /* Block descriptor. */
                pWalk->fInfo = PGM_WALKINFO_BIG_PAGE;
                pWalk->GCPhys = (RTGCPHYS)(Desc & fGCPhysLvl2BlockBase) | (GCPtr & offLvl2BlockMask);
                return pgmGstQueryPageCheckPermissions(pWalk, Desc, fFlags, uLvl);
            }

            /* Full lookup mask from now on. */
            fLookupMask = fLookupMaskFull;
            GCPhysPt = (RTGCPHYS)(Desc & fNextTableOrPageMask);
        }

        AssertCompile(a_InitialLookupLvl <= PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_THREE);
        uint8_t const uLvl = 3;

        /* Next level. */
        rc = pgmPhysGCPhys2CCPtrLockless(pVCpu, GCPhysPt, (void **)&paDesc);
        if (RT_SUCCESS(rc)) { /* probable */ }
        else return pgmGstWalkFastReturnBadPhysAddr(pVCpu, pWalk, uLvl, rc);

        Desc = ASMAtomicUoReadU64(&paDesc[(GCPtr >> cLvl3Shift) & fLookupMask]);
        if (Desc & ARMV8_VMSA64_DESC_F_VALID) { /* probable */ }
        else return pgmGstWalkFastReturnNotPresent(pVCpu, pWalk, uLvl);

        if (Desc & ARMV8_VMSA64_DESC_F_TBL_OR_PG) { /* probable */ }
        else return pgmGstWalkFastReturnRsvdError(pVCpu, pWalk, uLvl); /* No block descriptors. */

        /* Level 3 page descriptor: combine page base and in-page offset, then check permissions. */
        pWalk->GCPhys = (RTGCPHYS)(Desc & fNextTableOrPageMask) | (GCPtr & offPageMask);
        return pgmGstQueryPageCheckPermissions(pWalk, Desc, fFlags, uLvl);
    }
    else
        AssertReleaseFailedReturn(VERR_PGM_MODE_IPE);
}
806
807
808template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd, bool a_f52BitOa>
809static PGM_CTX_DECL(int) PGM_CTX(pgm,GstModifyPage)(PVMCPUCC pVCpu, RTGCPTR GCPtr, size_t cb, uint64_t fFlags, uint64_t fMask)
810{
811 /** @todo Ignore for now. */
812 RT_NOREF(pVCpu, GCPtr, cb, fFlags, fMask);
813 return VINF_SUCCESS;
814}
815
816
817template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd, bool a_f52BitOa>
818static PGM_CTX_DECL(int) PGM_CTX(pgm,GstWalk)(PVMCPUCC pVCpu, RTGCPTR GCPtr, PPGMPTWALK pWalk, PPGMPTWALKGST pGstWalk)
819{
820 pGstWalk->enmType = PGMPTWALKGSTTYPE_INVALID;
821 return pgmGstWalkWorker<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa>(pVCpu, GCPtr, pWalk, pGstWalk);
822}
823
824
825template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd, bool a_f52BitOa>
826static PGM_CTX_DECL(int) PGM_CTX(pgm,GstEnter)(PVMCPUCC pVCpu)
827{
828 /* Nothing to do for now. */
829 RT_NOREF(pVCpu);
830 return VINF_SUCCESS;
831}
832
833
834template<bool a_fTtbr0, uint8_t a_InitialLookupLvl, uint8_t a_GranuleSz, bool a_fTbi, bool a_fEpd, bool a_f52BitOa>
835static PGM_CTX_DECL(int) PGM_CTX(pgm,GstExit)(PVMCPUCC pVCpu)
836{
837 /* Nothing to do for now. */
838 RT_NOREF(pVCpu);
839 return VINF_SUCCESS;
840}
841
842
843/**
844 * Guest mode data array.
845 */
846PGMMODEDATAGST const g_aPgmGuestModeData[PGM_GUEST_MODE_DATA_ARRAY_SIZE] =
847{
848 { UINT32_MAX, NULL, NULL, NULL, NULL, NULL }, /* 0 */
849 {
850 PGM_TYPE_NONE,
851 PGM_CTX(pgm,GstNoneGetPage),
852 PGM_CTX(pgm,GstNoneQueryPageFast),
853 PGM_CTX(pgm,GstNoneModifyPage),
854 PGM_CTX(pgm,GstNoneWalk),
855 PGM_CTX(pgm,GstNoneEnter),
856 PGM_CTX(pgm,GstNoneExit),
857 },
858
859#define PGM_MODE_TYPE_CREATE(a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa) \
860 (2 + ( (a_f52BitOa ? RT_BIT_32(8) : 0) \
861 | (a_fEpd ? RT_BIT_32(7) : 0) \
862 | (a_fTbi ? RT_BIT_32(6) : 0) \
863 | (a_GranuleSz << 4) \
864 | (a_InitialLookupLvl << 1) \
865 | (a_fTtbr0 ? RT_BIT_32(0) : 0) ))
866
867#define PGM_MODE_CREATE_EX(a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa) \
868 { \
869 PGM_MODE_TYPE_CREATE(a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa), \
870 PGM_CTX(pgm,GstGetPage)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa>, \
871 PGM_CTX(pgm,GstQueryPageFast)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa>, \
872 PGM_CTX(pgm,GstModifyPage)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa>, \
873 PGM_CTX(pgm,GstWalk)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa>, \
874 PGM_CTX(pgm,GstEnter)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa>, \
875 PGM_CTX(pgm,GstExit)<a_fTtbr0, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa> \
876 }
877
878#define PGM_MODE_CREATE_TTBR(a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa) \
879 PGM_MODE_CREATE_EX(false, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa), \
880 PGM_MODE_CREATE_EX(true, a_InitialLookupLvl, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa)
881
882#define PGM_MODE_CREATE_LOOKUP_LVL(a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa) \
883 PGM_MODE_CREATE_TTBR(PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_MINUS_ONE, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa), \
884 PGM_MODE_CREATE_TTBR(PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ZERO, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa ), \
885 PGM_MODE_CREATE_TTBR(PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ONE, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa ), \
886 PGM_MODE_CREATE_TTBR(PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_TWO, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa ), \
887 PGM_MODE_CREATE_TTBR(PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_THREE, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa ), \
888 PGM_MODE_CREATE_TTBR(PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_INVALID, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa ), /* Filler for 3 bit lookup level */ \
889 PGM_MODE_CREATE_TTBR(PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_INVALID, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa ), /* Filler for 3 bit lookup level */ \
890 PGM_MODE_CREATE_TTBR(PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_INVALID, a_GranuleSz, a_fTbi, a_fEpd, a_f52BitOa ) /* Filler for 3 bit lookup level */
891
892#define PGM_MODE_CREATE_GRANULE_SZ(a_fTbi, a_fEpd, a_f52BitOa) \
893 PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_INVALID, a_fTbi, a_fEpd, a_f52BitOa), \
894 PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_16KB, a_fTbi, a_fEpd, a_f52BitOa), \
895 PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_4KB, a_fTbi, a_fEpd, a_f52BitOa), \
896 PGM_MODE_CREATE_LOOKUP_LVL(ARMV8_TCR_EL1_AARCH64_TG1_64KB, a_fTbi, a_fEpd, a_f52BitOa)
897
898#define PGM_MODE_CREATE_TBI(a_fEpd, a_f52BitOa) \
899 PGM_MODE_CREATE_GRANULE_SZ(false, a_fEpd, a_f52BitOa), \
900 PGM_MODE_CREATE_GRANULE_SZ(true, a_fEpd, a_f52BitOa)
901
902#define PGM_MODE_CREATE_EPD(a_f52BitOa) \
903 PGM_MODE_CREATE_TBI(false, a_f52BitOa), \
904 PGM_MODE_CREATE_TBI(true, a_f52BitOa)
905
906 /* Recursive expansion for the win, this will blow up to 512 entries covering all possible modes. */
907 PGM_MODE_CREATE_EPD(false),
908 PGM_MODE_CREATE_EPD(true)
909
910#undef PGM_MODE_CREATE_EPD
911#undef PGM_MODE_CREATE_TBI
912#undef PGM_MODE_CREATE_GRANULE_SZ
913#undef PGM_MODE_CREATE_LOOKUP_LVL
914#undef PGM_MODE_CREATE_TTBR
915#undef PGM_MODE_CREATE_EX
916};
917
918
/**
 * Deduces the guest paging mode table index and initial lookup mask from the
 * system control and translation control registers.
 *
 * @returns Index into g_aPgmGuestModeData for the configured translation
 *          regime, or PGM_TYPE_NONE when the MMU is disabled.
 * @param   u64RegSctlr         The SCTLR_ELx value (only the M bit is used).
 * @param   u64RegTcr           The TCR_ELx value.
 * @param   pfInitialLookupMask Where to return the table-index mask applied at
 *                              the initial translation table level.
 *
 * @tparam  a_offTsz    Bit offset of the TxSZ field in TCR for this TTBR.
 * @tparam  a_offTg     Bit offset of the TGx (granule size) field in TCR.
 * @tparam  a_offTbi    Bit offset of the TBIx flag in TCR.
 * @tparam  a_offEpd    Bit offset of the EPDx flag in TCR.
 * @tparam  a_fTtbr0    Whether this is for TTBR0 (true) or TTBR1 (false).
 *
 * @note    NOTE(review): the granule value is compared against the TG0_*
 *          constants even when a_offTg addresses the TG1 field, whose
 *          architectural encoding differs from TG0's (TG1: 1=16K, 2=4K,
 *          3=64K vs TG0: 0=4K, 1=64K, 2=16K).  Presumably the ARMV8_TCR_*
 *          constant definitions make this line up with the mode table's
 *          granule index encoding -- confirm against the header.
 */
template<uint8_t a_offTsz, uint8_t a_offTg, uint8_t a_offTbi, uint8_t a_offEpd, bool a_fTtbr0>
DECLINLINE(uintptr_t) pgmR3DeduceTypeFromTcr(uint64_t u64RegSctlr, uint64_t u64RegTcr, uint64_t *pfInitialLookupMask)
{
    uintptr_t idxNewGst = 0;

    /*
     * MMU enabled at all?
     * Technically this is incorrect as we use ARMV8_SCTLR_EL1_M regardless of the EL but the bit is the same
     * for all exception levels.
     */
    if (u64RegSctlr & ARMV8_SCTLR_EL1_M)
    {
        /* Extract the per-TTBR fields at the template-supplied offsets. */
        uint64_t const u64Tsz = (u64RegTcr >> a_offTsz) & 0x1f;
        uint64_t u64Tg = (u64RegTcr >> a_offTg) & 0x3;
        bool const fTbi = RT_BOOL(u64RegTcr & RT_BIT_64(a_offTbi));
        bool const fEpd = RT_BOOL(u64RegTcr & RT_BIT_64(a_offEpd));

        /*
         * From the ARM reference manual regarding granule size choices:
         *
         * If the value is programmed to either a reserved value or a size that has not been implemented, then
         * the hardware will treat the field as if it has been programmed to an IMPLEMENTATION DEFINED
         * choice of the sizes that has been implemented for all purposes other than the value read back from
         * this register.
         *
         * We always fall back on the 4KiB granule size in that case.
         */
        /** @todo Can this be made table driven? */
        uint64_t uLookupLvl;
        if (u64Tg == ARMV8_TCR_EL1_AARCH64_TG0_16KB)
        {
            if (u64Tsz <= 16)
            {
                uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ZERO;
                *pfInitialLookupMask = 0x1;
            }
            else if (u64Tsz >= 17 && u64Tsz <= 27)
            {
                uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ONE;
                /* NOTE(review): a 16KiB-granule level 1 index spans 28 - TxSZ bits
                   (level 1 resolves VA bits [46:36]), so this mask looks one bit
                   too wide; harmless if out-of-range VAs are rejected before the
                   walk -- confirm. */
                *pfInitialLookupMask = RT_BIT_64(28 - u64Tsz + 1) - 1;
            }
            else if (u64Tsz >= 28 && u64Tsz <= 38)
            {
                uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_TWO;
                *pfInitialLookupMask = RT_BIT_64(38 - u64Tsz + 1) - 1;
            }
            else /* if (u64Tsz == 39) */
            {
                uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_THREE;
                *pfInitialLookupMask = 0x1;
            }
        }
        else if (u64Tg == ARMV8_TCR_EL1_AARCH64_TG0_64KB)
        {
            if (/*u64Tsz >= 16 &&*/ u64Tsz <= 21)
            {
                uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ONE;
                *pfInitialLookupMask = RT_BIT_64(21 - u64Tsz + 1) - 1;
            }
            else if (u64Tsz >= 22 && u64Tsz <= 34)
            {
                uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_TWO;
                *pfInitialLookupMask = RT_BIT_64(34 - u64Tsz + 1) - 1;
            }
            else /*if (u64Tsz >= 35 && u64Tsz <= 39)*/
            {
                uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_THREE;
                /* NOTE(review): a 64KiB-granule level 3 index spans 48 - TxSZ bits
                   (level 3 resolves VA bits [28:16], i.e. 13 bits at TxSZ=35),
                   whereas this yields 40 - TxSZ bits -- verify against the
                   Arm ARM initial lookup level table. */
                if (u64Tsz <= 39)
                    *pfInitialLookupMask = RT_BIT_64(39 - u64Tsz + 1) - 1;
                else
                    *pfInitialLookupMask = 0x1;
            }
        }
        else /* if (u64Tg == ARMV8_TCR_EL1_AARCH64_TG0_4KB) */
        {
            /*
             * From: https://github.com/codingbelief/arm-architecture-reference-manual-for-armv8-a/blob/master/en/chapter_d4/d42_2_controlling_address_translation_stages.md
             * For all translation stages
             * The maximum TxSZ value is 39. If TxSZ is programmed to a value larger than 39 then it is IMPLEMENTATION DEFINED whether:
             * - The implementation behaves as if the field is programmed to 39 for all purposes other than reading back the value of the field.
             * - Any use of the TxSZ value generates a Level 0 Translation fault for the stage of translation at which TxSZ is used.
             *
             * For a stage 1 translation
             * The minimum TxSZ value is 16. If TxSZ is programmed to a value smaller than 16 then it is IMPLEMENTATION DEFINED whether:
             * - The implementation behaves as if the field were programmed to 16 for all purposes other than reading back the value of the field.
             * - Any use of the TxSZ value generates a stage 1 Level 0 Translation fault.
             *
             * We currently choose the former for both.
             */
            if (/*u64Tsz >= 16 &&*/ u64Tsz <= 24)
            {
                uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ZERO;
                if (u64Tsz >= 16)
                    *pfInitialLookupMask = RT_BIT_64(24 - u64Tsz + 1) - 1;
                else
                    *pfInitialLookupMask = RT_BIT_64(9) - 1; /* TxSZ < 16 treated as 16: full 9-bit level 0 index. */
            }
            else if (u64Tsz >= 25 && u64Tsz <= 33)
            {
                uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_ONE;
                *pfInitialLookupMask = RT_BIT_64(33 - u64Tsz + 1) - 1;
            }
            else /*if (u64Tsz >= 34 && u64Tsz <= 39)*/
            {
                uLookupLvl = PGM_MODE_ARMV8_INITIAL_LOOKUP_LVL_TWO;
                /* NOTE(review): a 4KiB-granule level 2 index spans 43 - TxSZ bits
                   (level 2 resolves VA bits [29:21], i.e. 9 bits at TxSZ=34),
                   whereas this yields 40 - TxSZ bits -- verify against the
                   Arm ARM initial lookup level table. */
                if (u64Tsz <= 39)
                    *pfInitialLookupMask = RT_BIT_64(39 - u64Tsz + 1) - 1;
                else
                    *pfInitialLookupMask = 0x1;
            }

            /* Normalize reserved/unimplemented granule encodings to 4KiB (see comment above). */
            u64Tg = ARMV8_TCR_EL1_AARCH64_TG0_4KB;
        }

        /* Build the index into the PGM mode callback table for the given config. */
        idxNewGst = PGM_MODE_TYPE_CREATE(a_fTtbr0, uLookupLvl, u64Tg, fTbi, fEpd, false /*f52BitOa*/);
    }
    else
        idxNewGst = PGM_TYPE_NONE; /* MMU off: no guest paging. */

    return idxNewGst;
}
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette