VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 65754

Last change on this file since 65754 was 65754, checked in by vboxsync, 8 years ago

IEM: 0x0f 0x7e split up.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 667.3 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 65754 2017-02-13 09:14:10Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24#ifdef _MSC_VER
25# pragma warning(push)
26# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
27#endif
28
29
30/**
31 * Common worker for instructions like ADD, AND, OR, ++ with a byte
32 * memory/register as the destination.
33 *
34 * @param pImpl Pointer to the instruction implementation (assembly).
35 */
36FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
37{
38 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
39
40 /*
41 * If rm is denoting a register, no more instruction bytes.
42 */
43 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
44 {
45 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
46
47 IEM_MC_BEGIN(3, 0);
48 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
49 IEM_MC_ARG(uint8_t, u8Src, 1);
50 IEM_MC_ARG(uint32_t *, pEFlags, 2);
51
52 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
53 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
54 IEM_MC_REF_EFLAGS(pEFlags);
55 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
56
57 IEM_MC_ADVANCE_RIP();
58 IEM_MC_END();
59 }
60 else
61 {
62 /*
63 * We're accessing memory.
64 * Note! We're putting the eflags on the stack here so we can commit them
65 * after the memory.
66 */
67 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
68 IEM_MC_BEGIN(3, 2);
69 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
70 IEM_MC_ARG(uint8_t, u8Src, 1);
71 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
72 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
73
74 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
75 if (!pImpl->pfnLockedU8)
76 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
77 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
78 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
79 IEM_MC_FETCH_EFLAGS(EFlags);
80 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
81 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
82 else
83 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
84
85 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
86 IEM_MC_COMMIT_EFLAGS(EFlags);
87 IEM_MC_ADVANCE_RIP();
88 IEM_MC_END();
89 }
90 return VINF_SUCCESS;
91}
92
93
94/**
95 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
96 * memory/register as the destination.
97 *
98 * @param pImpl Pointer to the instruction implementation (assembly).
99 */
100FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
101{
102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
103
104 /*
105 * If rm is denoting a register, no more instruction bytes.
106 */
107 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
108 {
109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
110
111 switch (pVCpu->iem.s.enmEffOpSize)
112 {
113 case IEMMODE_16BIT:
114 IEM_MC_BEGIN(3, 0);
115 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
116 IEM_MC_ARG(uint16_t, u16Src, 1);
117 IEM_MC_ARG(uint32_t *, pEFlags, 2);
118
119 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
120 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
121 IEM_MC_REF_EFLAGS(pEFlags);
122 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
123
124 IEM_MC_ADVANCE_RIP();
125 IEM_MC_END();
126 break;
127
128 case IEMMODE_32BIT:
129 IEM_MC_BEGIN(3, 0);
130 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
131 IEM_MC_ARG(uint32_t, u32Src, 1);
132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
133
134 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
135 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
136 IEM_MC_REF_EFLAGS(pEFlags);
137 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
138
139 if (pImpl != &g_iemAImpl_test)
140 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
141 IEM_MC_ADVANCE_RIP();
142 IEM_MC_END();
143 break;
144
145 case IEMMODE_64BIT:
146 IEM_MC_BEGIN(3, 0);
147 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
148 IEM_MC_ARG(uint64_t, u64Src, 1);
149 IEM_MC_ARG(uint32_t *, pEFlags, 2);
150
151 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
152 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
153 IEM_MC_REF_EFLAGS(pEFlags);
154 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
155
156 IEM_MC_ADVANCE_RIP();
157 IEM_MC_END();
158 break;
159 }
160 }
161 else
162 {
163 /*
164 * We're accessing memory.
165 * Note! We're putting the eflags on the stack here so we can commit them
166 * after the memory.
167 */
168 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
169 switch (pVCpu->iem.s.enmEffOpSize)
170 {
171 case IEMMODE_16BIT:
172 IEM_MC_BEGIN(3, 2);
173 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
174 IEM_MC_ARG(uint16_t, u16Src, 1);
175 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
177
178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
179 if (!pImpl->pfnLockedU16)
180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
181 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
182 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
183 IEM_MC_FETCH_EFLAGS(EFlags);
184 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
185 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
186 else
187 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
188
189 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
190 IEM_MC_COMMIT_EFLAGS(EFlags);
191 IEM_MC_ADVANCE_RIP();
192 IEM_MC_END();
193 break;
194
195 case IEMMODE_32BIT:
196 IEM_MC_BEGIN(3, 2);
197 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
198 IEM_MC_ARG(uint32_t, u32Src, 1);
199 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
201
202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
203 if (!pImpl->pfnLockedU32)
204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
205 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
206 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
207 IEM_MC_FETCH_EFLAGS(EFlags);
208 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
209 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
210 else
211 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
212
213 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
214 IEM_MC_COMMIT_EFLAGS(EFlags);
215 IEM_MC_ADVANCE_RIP();
216 IEM_MC_END();
217 break;
218
219 case IEMMODE_64BIT:
220 IEM_MC_BEGIN(3, 2);
221 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
222 IEM_MC_ARG(uint64_t, u64Src, 1);
223 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
225
226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
227 if (!pImpl->pfnLockedU64)
228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
229 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
230 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
231 IEM_MC_FETCH_EFLAGS(EFlags);
232 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
233 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
234 else
235 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
236
237 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
238 IEM_MC_COMMIT_EFLAGS(EFlags);
239 IEM_MC_ADVANCE_RIP();
240 IEM_MC_END();
241 break;
242 }
243 }
244 return VINF_SUCCESS;
245}
246
247
248/**
249 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
250 * the destination.
251 *
252 * @param pImpl Pointer to the instruction implementation (assembly).
253 */
254FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
255{
256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
257
258 /*
259 * If rm is denoting a register, no more instruction bytes.
260 */
261 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
262 {
263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
264 IEM_MC_BEGIN(3, 0);
265 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
266 IEM_MC_ARG(uint8_t, u8Src, 1);
267 IEM_MC_ARG(uint32_t *, pEFlags, 2);
268
269 IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
270 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
271 IEM_MC_REF_EFLAGS(pEFlags);
272 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
273
274 IEM_MC_ADVANCE_RIP();
275 IEM_MC_END();
276 }
277 else
278 {
279 /*
280 * We're accessing memory.
281 */
282 IEM_MC_BEGIN(3, 1);
283 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
284 IEM_MC_ARG(uint8_t, u8Src, 1);
285 IEM_MC_ARG(uint32_t *, pEFlags, 2);
286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
287
288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
290 IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
291 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
292 IEM_MC_REF_EFLAGS(pEFlags);
293 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
294
295 IEM_MC_ADVANCE_RIP();
296 IEM_MC_END();
297 }
298 return VINF_SUCCESS;
299}
300
301
302/**
303 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
304 * register as the destination.
305 *
306 * @param pImpl Pointer to the instruction implementation (assembly).
307 */
308FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
309{
310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
311
312 /*
313 * If rm is denoting a register, no more instruction bytes.
314 */
315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
316 {
317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
318 switch (pVCpu->iem.s.enmEffOpSize)
319 {
320 case IEMMODE_16BIT:
321 IEM_MC_BEGIN(3, 0);
322 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
323 IEM_MC_ARG(uint16_t, u16Src, 1);
324 IEM_MC_ARG(uint32_t *, pEFlags, 2);
325
326 IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
327 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
328 IEM_MC_REF_EFLAGS(pEFlags);
329 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
330
331 IEM_MC_ADVANCE_RIP();
332 IEM_MC_END();
333 break;
334
335 case IEMMODE_32BIT:
336 IEM_MC_BEGIN(3, 0);
337 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
338 IEM_MC_ARG(uint32_t, u32Src, 1);
339 IEM_MC_ARG(uint32_t *, pEFlags, 2);
340
341 IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
342 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
343 IEM_MC_REF_EFLAGS(pEFlags);
344 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
345
346 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
347 IEM_MC_ADVANCE_RIP();
348 IEM_MC_END();
349 break;
350
351 case IEMMODE_64BIT:
352 IEM_MC_BEGIN(3, 0);
353 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
354 IEM_MC_ARG(uint64_t, u64Src, 1);
355 IEM_MC_ARG(uint32_t *, pEFlags, 2);
356
357 IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
358 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
359 IEM_MC_REF_EFLAGS(pEFlags);
360 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
361
362 IEM_MC_ADVANCE_RIP();
363 IEM_MC_END();
364 break;
365 }
366 }
367 else
368 {
369 /*
370 * We're accessing memory.
371 */
372 switch (pVCpu->iem.s.enmEffOpSize)
373 {
374 case IEMMODE_16BIT:
375 IEM_MC_BEGIN(3, 1);
376 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
377 IEM_MC_ARG(uint16_t, u16Src, 1);
378 IEM_MC_ARG(uint32_t *, pEFlags, 2);
379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
380
381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
383 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
384 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
385 IEM_MC_REF_EFLAGS(pEFlags);
386 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
387
388 IEM_MC_ADVANCE_RIP();
389 IEM_MC_END();
390 break;
391
392 case IEMMODE_32BIT:
393 IEM_MC_BEGIN(3, 1);
394 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
395 IEM_MC_ARG(uint32_t, u32Src, 1);
396 IEM_MC_ARG(uint32_t *, pEFlags, 2);
397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
398
399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
401 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
402 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
403 IEM_MC_REF_EFLAGS(pEFlags);
404 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
405
406 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
407 IEM_MC_ADVANCE_RIP();
408 IEM_MC_END();
409 break;
410
411 case IEMMODE_64BIT:
412 IEM_MC_BEGIN(3, 1);
413 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
414 IEM_MC_ARG(uint64_t, u64Src, 1);
415 IEM_MC_ARG(uint32_t *, pEFlags, 2);
416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
417
418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
420 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
421 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
422 IEM_MC_REF_EFLAGS(pEFlags);
423 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
424
425 IEM_MC_ADVANCE_RIP();
426 IEM_MC_END();
427 break;
428 }
429 }
430 return VINF_SUCCESS;
431}
432
433
434/**
435 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
436 * a byte immediate.
437 *
438 * @param pImpl Pointer to the instruction implementation (assembly).
439 */
440FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
441{
442 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
444
445 IEM_MC_BEGIN(3, 0);
446 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
447 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
448 IEM_MC_ARG(uint32_t *, pEFlags, 2);
449
450 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
451 IEM_MC_REF_EFLAGS(pEFlags);
452 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
453
454 IEM_MC_ADVANCE_RIP();
455 IEM_MC_END();
456 return VINF_SUCCESS;
457}
458
459
460/**
461 * Common worker for instructions like ADD, AND, OR, ++ with working on
462 * AX/EAX/RAX with a word/dword immediate.
463 *
464 * @param pImpl Pointer to the instruction implementation (assembly).
465 */
466FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
467{
468 switch (pVCpu->iem.s.enmEffOpSize)
469 {
470 case IEMMODE_16BIT:
471 {
472 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
474
475 IEM_MC_BEGIN(3, 0);
476 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
477 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
478 IEM_MC_ARG(uint32_t *, pEFlags, 2);
479
480 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
481 IEM_MC_REF_EFLAGS(pEFlags);
482 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
483
484 IEM_MC_ADVANCE_RIP();
485 IEM_MC_END();
486 return VINF_SUCCESS;
487 }
488
489 case IEMMODE_32BIT:
490 {
491 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
493
494 IEM_MC_BEGIN(3, 0);
495 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
496 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
497 IEM_MC_ARG(uint32_t *, pEFlags, 2);
498
499 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
500 IEM_MC_REF_EFLAGS(pEFlags);
501 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
502
503 if (pImpl != &g_iemAImpl_test)
504 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
505 IEM_MC_ADVANCE_RIP();
506 IEM_MC_END();
507 return VINF_SUCCESS;
508 }
509
510 case IEMMODE_64BIT:
511 {
512 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
514
515 IEM_MC_BEGIN(3, 0);
516 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
517 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
518 IEM_MC_ARG(uint32_t *, pEFlags, 2);
519
520 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
521 IEM_MC_REF_EFLAGS(pEFlags);
522 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
523
524 IEM_MC_ADVANCE_RIP();
525 IEM_MC_END();
526 return VINF_SUCCESS;
527 }
528
529 IEM_NOT_REACHED_DEFAULT_CASE_RET();
530 }
531}
532
533
534/** Opcodes 0xf1, 0xd6. */
535FNIEMOP_DEF(iemOp_Invalid)
536{
537 IEMOP_MNEMONIC(Invalid, "Invalid");
538 return IEMOP_RAISE_INVALID_OPCODE();
539}
540
541
542/** Invalid with RM byte . */
543FNIEMOPRM_DEF(iemOp_InvalidWithRM)
544{
545 RT_NOREF_PV(bRm);
546 IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
547 return IEMOP_RAISE_INVALID_OPCODE();
548}
549
550
551/** Invalid opcode where intel requires Mod R/M sequence. */
552FNIEMOP_DEF(iemOp_InvalidNeedRM)
553{
554 IEMOP_MNEMONIC(InvalidNeedRM, "InvalidNeedRM");
555 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
556 {
557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
558#ifndef TST_IEM_CHECK_MC
559 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
560 {
561 RTGCPTR GCPtrEff;
562 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
563 if (rcStrict != VINF_SUCCESS)
564 return rcStrict;
565 }
566#endif
567 IEMOP_HLP_DONE_DECODING();
568 }
569 return IEMOP_RAISE_INVALID_OPCODE();
570}
571
572
573/** Invalid opcode where intel requires Mod R/M sequence and 8-byte
574 * immediate. */
575FNIEMOP_DEF(iemOp_InvalidNeedRMImm8)
576{
577 IEMOP_MNEMONIC(InvalidNeedRMImm8, "InvalidNeedRMImm8");
578 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
579 {
580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
581#ifndef TST_IEM_CHECK_MC
582 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
583 {
584 RTGCPTR GCPtrEff;
585 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
586 if (rcStrict != VINF_SUCCESS)
587 return rcStrict;
588 }
589#endif
590 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
591 IEMOP_HLP_DONE_DECODING();
592 }
593 return IEMOP_RAISE_INVALID_OPCODE();
594}
595
596
597/** Invalid opcode where intel requires a 3rd escape byte and a Mod R/M
598 * sequence. */
599FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRM)
600{
601 IEMOP_MNEMONIC(InvalidNeed3ByteEscRM, "InvalidNeed3ByteEscRM");
602 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
603 {
604 uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
606#ifndef TST_IEM_CHECK_MC
607 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
608 {
609 RTGCPTR GCPtrEff;
610 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
611 if (rcStrict != VINF_SUCCESS)
612 return rcStrict;
613 }
614#endif
615 IEMOP_HLP_DONE_DECODING();
616 }
617 return IEMOP_RAISE_INVALID_OPCODE();
618}
619
620
621/** Invalid opcode where intel requires a 3rd escape byte, Mod R/M sequence, and
622 * a 8-byte immediate. */
623FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRMImm8)
624{
625 IEMOP_MNEMONIC(InvalidNeed3ByteEscRMImm8, "InvalidNeed3ByteEscRMImm8");
626 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
627 {
628 uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
630#ifndef TST_IEM_CHECK_MC
631 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
632 {
633 RTGCPTR GCPtrEff;
634 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 1, &GCPtrEff);
635 if (rcStrict != VINF_SUCCESS)
636 return rcStrict;
637 }
638#endif
639 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
640 IEMOP_HLP_DONE_DECODING();
641 }
642 return IEMOP_RAISE_INVALID_OPCODE();
643}
644
645
646
647/** @name ..... opcodes.
648 *
649 * @{
650 */
651
652/** @} */
653
654
655/** @name Two byte opcodes (first byte 0x0f).
656 *
657 * @{
658 */
659
660/** Opcode 0x0f 0x00 /0. */
661FNIEMOPRM_DEF(iemOp_Grp6_sldt)
662{
663 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
664 IEMOP_HLP_MIN_286();
665 IEMOP_HLP_NO_REAL_OR_V86_MODE();
666
667 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
668 {
669 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
670 switch (pVCpu->iem.s.enmEffOpSize)
671 {
672 case IEMMODE_16BIT:
673 IEM_MC_BEGIN(0, 1);
674 IEM_MC_LOCAL(uint16_t, u16Ldtr);
675 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
676 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
677 IEM_MC_ADVANCE_RIP();
678 IEM_MC_END();
679 break;
680
681 case IEMMODE_32BIT:
682 IEM_MC_BEGIN(0, 1);
683 IEM_MC_LOCAL(uint32_t, u32Ldtr);
684 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
685 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
686 IEM_MC_ADVANCE_RIP();
687 IEM_MC_END();
688 break;
689
690 case IEMMODE_64BIT:
691 IEM_MC_BEGIN(0, 1);
692 IEM_MC_LOCAL(uint64_t, u64Ldtr);
693 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
694 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
695 IEM_MC_ADVANCE_RIP();
696 IEM_MC_END();
697 break;
698
699 IEM_NOT_REACHED_DEFAULT_CASE_RET();
700 }
701 }
702 else
703 {
704 IEM_MC_BEGIN(0, 2);
705 IEM_MC_LOCAL(uint16_t, u16Ldtr);
706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
708 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
709 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
710 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
711 IEM_MC_ADVANCE_RIP();
712 IEM_MC_END();
713 }
714 return VINF_SUCCESS;
715}
716
717
718/** Opcode 0x0f 0x00 /1. */
719FNIEMOPRM_DEF(iemOp_Grp6_str)
720{
721 IEMOP_MNEMONIC(str, "str Rv/Mw");
722 IEMOP_HLP_MIN_286();
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724
725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
726 {
727 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
728 switch (pVCpu->iem.s.enmEffOpSize)
729 {
730 case IEMMODE_16BIT:
731 IEM_MC_BEGIN(0, 1);
732 IEM_MC_LOCAL(uint16_t, u16Tr);
733 IEM_MC_FETCH_TR_U16(u16Tr);
734 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
735 IEM_MC_ADVANCE_RIP();
736 IEM_MC_END();
737 break;
738
739 case IEMMODE_32BIT:
740 IEM_MC_BEGIN(0, 1);
741 IEM_MC_LOCAL(uint32_t, u32Tr);
742 IEM_MC_FETCH_TR_U32(u32Tr);
743 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
744 IEM_MC_ADVANCE_RIP();
745 IEM_MC_END();
746 break;
747
748 case IEMMODE_64BIT:
749 IEM_MC_BEGIN(0, 1);
750 IEM_MC_LOCAL(uint64_t, u64Tr);
751 IEM_MC_FETCH_TR_U64(u64Tr);
752 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
753 IEM_MC_ADVANCE_RIP();
754 IEM_MC_END();
755 break;
756
757 IEM_NOT_REACHED_DEFAULT_CASE_RET();
758 }
759 }
760 else
761 {
762 IEM_MC_BEGIN(0, 2);
763 IEM_MC_LOCAL(uint16_t, u16Tr);
764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
766 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
767 IEM_MC_FETCH_TR_U16(u16Tr);
768 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
769 IEM_MC_ADVANCE_RIP();
770 IEM_MC_END();
771 }
772 return VINF_SUCCESS;
773}
774
775
776/** Opcode 0x0f 0x00 /2. */
777FNIEMOPRM_DEF(iemOp_Grp6_lldt)
778{
779 IEMOP_MNEMONIC(lldt, "lldt Ew");
780 IEMOP_HLP_MIN_286();
781 IEMOP_HLP_NO_REAL_OR_V86_MODE();
782
783 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
784 {
785 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
786 IEM_MC_BEGIN(1, 0);
787 IEM_MC_ARG(uint16_t, u16Sel, 0);
788 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
789 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
790 IEM_MC_END();
791 }
792 else
793 {
794 IEM_MC_BEGIN(1, 1);
795 IEM_MC_ARG(uint16_t, u16Sel, 0);
796 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
798 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
799 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
800 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
801 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
802 IEM_MC_END();
803 }
804 return VINF_SUCCESS;
805}
806
807
808/** Opcode 0x0f 0x00 /3. */
809FNIEMOPRM_DEF(iemOp_Grp6_ltr)
810{
811 IEMOP_MNEMONIC(ltr, "ltr Ew");
812 IEMOP_HLP_MIN_286();
813 IEMOP_HLP_NO_REAL_OR_V86_MODE();
814
815 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
816 {
817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
818 IEM_MC_BEGIN(1, 0);
819 IEM_MC_ARG(uint16_t, u16Sel, 0);
820 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
821 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
822 IEM_MC_END();
823 }
824 else
825 {
826 IEM_MC_BEGIN(1, 1);
827 IEM_MC_ARG(uint16_t, u16Sel, 0);
828 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
831 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test ordre */
832 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
833 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
834 IEM_MC_END();
835 }
836 return VINF_SUCCESS;
837}
838
839
840/** Opcode 0x0f 0x00 /3. */
841FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
842{
843 IEMOP_HLP_MIN_286();
844 IEMOP_HLP_NO_REAL_OR_V86_MODE();
845
846 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
847 {
848 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
849 IEM_MC_BEGIN(2, 0);
850 IEM_MC_ARG(uint16_t, u16Sel, 0);
851 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
852 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
853 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
854 IEM_MC_END();
855 }
856 else
857 {
858 IEM_MC_BEGIN(2, 1);
859 IEM_MC_ARG(uint16_t, u16Sel, 0);
860 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
863 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
864 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
865 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
866 IEM_MC_END();
867 }
868 return VINF_SUCCESS;
869}
870
871
872/** Opcode 0x0f 0x00 /4. */
873FNIEMOPRM_DEF(iemOp_Grp6_verr)
874{
875 IEMOP_MNEMONIC(verr, "verr Ew");
876 IEMOP_HLP_MIN_286();
877 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
878}
879
880
881/** Opcode 0x0f 0x00 /5. */
882FNIEMOPRM_DEF(iemOp_Grp6_verw)
883{
884 IEMOP_MNEMONIC(verw, "verw Ew");
885 IEMOP_HLP_MIN_286();
886 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
887}
888
889
890/**
891 * Group 6 jump table.
892 */
893IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
894{
895 iemOp_Grp6_sldt,
896 iemOp_Grp6_str,
897 iemOp_Grp6_lldt,
898 iemOp_Grp6_ltr,
899 iemOp_Grp6_verr,
900 iemOp_Grp6_verw,
901 iemOp_InvalidWithRM,
902 iemOp_InvalidWithRM
903};
904
905/** Opcode 0x0f 0x00. */
906FNIEMOP_DEF(iemOp_Grp6)
907{
908 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
909 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
910}
911
912
913/** Opcode 0x0f 0x01 /0. */
914FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
915{
916 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
917 IEMOP_HLP_MIN_286();
918 IEMOP_HLP_64BIT_OP_SIZE();
919 IEM_MC_BEGIN(2, 1);
920 IEM_MC_ARG(uint8_t, iEffSeg, 0);
921 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
924 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
925 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
926 IEM_MC_END();
927 return VINF_SUCCESS;
928}
929
930
931/** Opcode 0x0f 0x01 /0. */
932FNIEMOP_DEF(iemOp_Grp7_vmcall)
933{
934 IEMOP_BITCH_ABOUT_STUB();
935 return IEMOP_RAISE_INVALID_OPCODE();
936}
937
938
939/** Opcode 0x0f 0x01 /0. */
940FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
941{
942 IEMOP_BITCH_ABOUT_STUB();
943 return IEMOP_RAISE_INVALID_OPCODE();
944}
945
946
947/** Opcode 0x0f 0x01 /0. */
948FNIEMOP_DEF(iemOp_Grp7_vmresume)
949{
950 IEMOP_BITCH_ABOUT_STUB();
951 return IEMOP_RAISE_INVALID_OPCODE();
952}
953
954
955/** Opcode 0x0f 0x01 /0. */
956FNIEMOP_DEF(iemOp_Grp7_vmxoff)
957{
958 IEMOP_BITCH_ABOUT_STUB();
959 return IEMOP_RAISE_INVALID_OPCODE();
960}
961
962
963/** Opcode 0x0f 0x01 /1. */
964FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
965{
966 IEMOP_MNEMONIC(sidt, "sidt Ms");
967 IEMOP_HLP_MIN_286();
968 IEMOP_HLP_64BIT_OP_SIZE();
969 IEM_MC_BEGIN(2, 1);
970 IEM_MC_ARG(uint8_t, iEffSeg, 0);
971 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
972 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
974 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
975 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
976 IEM_MC_END();
977 return VINF_SUCCESS;
978}
979
980
981/** Opcode 0x0f 0x01 /1. */
982FNIEMOP_DEF(iemOp_Grp7_monitor)
983{
984 IEMOP_MNEMONIC(monitor, "monitor");
985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
986 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
987}
988
989
990/** Opcode 0x0f 0x01 /1. */
991FNIEMOP_DEF(iemOp_Grp7_mwait)
992{
993 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
995 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
996}
997
998
/** Opcode 0x0f 0x01 /2.
 * Loads the GDTR from the memory operand; the effective operand size decides
 * how many base-address bytes iemCImpl_lgdt consumes. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();      /* 64-bit operand size in long mode per this helper. */
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    /* Effective address first, then declare decoding done (displacement bytes). */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1015
1016
/** Opcode 0x0f 0x01 0xd0 (XGETBV).
 * Raises \#UD unless the guest CPU profile advertises XSAVE/XRSTOR. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /* Lock and repz/repnz prefixes are rejected; plain 0x66 is tolerated. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1028
1029
/** Opcode 0x0f 0x01 0xd1 (XSETBV).
 * Raises \#UD unless the guest CPU profile advertises XSAVE/XRSTOR. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /* Lock and repz/repnz prefixes are rejected; plain 0x66 is tolerated. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1041
1042
/** Opcode 0x0f 0x01 /3.
 * Loads the IDTR from the memory operand.  Unlike lgdt above this computes the
 * forced 64-bit operand size inline rather than via IEMOP_HLP_64BIT_OP_SIZE. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    /* In 64-bit mode the operand size is always 64-bit regardless of prefixes. */
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    /* Effective address first, then declare decoding done (displacement bytes). */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1061
1062
/*
 * AMD SVM instructions (0x0f 0x01 /3 with mod=3) are not implemented yet;
 * FNIEMOP_UD_STUB makes each of them raise \#UD.
 */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
1086
1087/** Opcode 0x0f 0x01 /4. */
1088FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1089{
1090 IEMOP_MNEMONIC(smsw, "smsw");
1091 IEMOP_HLP_MIN_286();
1092 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1093 {
1094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1095 switch (pVCpu->iem.s.enmEffOpSize)
1096 {
1097 case IEMMODE_16BIT:
1098 IEM_MC_BEGIN(0, 1);
1099 IEM_MC_LOCAL(uint16_t, u16Tmp);
1100 IEM_MC_FETCH_CR0_U16(u16Tmp);
1101 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1102 { /* likely */ }
1103 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
1104 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1105 else
1106 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1107 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
1108 IEM_MC_ADVANCE_RIP();
1109 IEM_MC_END();
1110 return VINF_SUCCESS;
1111
1112 case IEMMODE_32BIT:
1113 IEM_MC_BEGIN(0, 1);
1114 IEM_MC_LOCAL(uint32_t, u32Tmp);
1115 IEM_MC_FETCH_CR0_U32(u32Tmp);
1116 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
1117 IEM_MC_ADVANCE_RIP();
1118 IEM_MC_END();
1119 return VINF_SUCCESS;
1120
1121 case IEMMODE_64BIT:
1122 IEM_MC_BEGIN(0, 1);
1123 IEM_MC_LOCAL(uint64_t, u64Tmp);
1124 IEM_MC_FETCH_CR0_U64(u64Tmp);
1125 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
1126 IEM_MC_ADVANCE_RIP();
1127 IEM_MC_END();
1128 return VINF_SUCCESS;
1129
1130 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1131 }
1132 }
1133 else
1134 {
1135 /* Ignore operand size here, memory refs are always 16-bit. */
1136 IEM_MC_BEGIN(0, 2);
1137 IEM_MC_LOCAL(uint16_t, u16Tmp);
1138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1139 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1141 IEM_MC_FETCH_CR0_U16(u16Tmp);
1142 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1143 { /* likely */ }
1144 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
1145 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1146 else
1147 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1148 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
1149 IEM_MC_ADVANCE_RIP();
1150 IEM_MC_END();
1151 return VINF_SUCCESS;
1152 }
1153}
1154
1155
/** Opcode 0x0f 0x01 /6.
 * Loads the machine status word from a register or 16-bit memory operand;
 * the actual CR0 update is done by iemCImpl_lmsw. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();            /* Instruction requires at least a 286. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: decode the effective address before signalling done. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1185
1186
1187/** Opcode 0x0f 0x01 /7. */
1188FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1189{
1190 IEMOP_MNEMONIC(invlpg, "invlpg");
1191 IEMOP_HLP_MIN_486();
1192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1193 IEM_MC_BEGIN(1, 1);
1194 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1195 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1196 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
1197 IEM_MC_END();
1198 return VINF_SUCCESS;
1199}
1200
1201
/** Opcode 0x0f 0x01 /7 (modrm 0xf8, SWAPGS).
 * 64-bit mode only; deferred to iemCImpl_swapgs. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();         /* \#UD outside 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1210
1211
/** Opcode 0x0f 0x01 /7 (modrm 0xf9, RDTSCP).
 * Not implemented yet; returns VERR_IEM_INSTR_NOT_IMPLEMENTED so the caller
 * falls back to another execution engine. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1219
1220
/** Opcode 0x0f 0x01 (group 7).
 * Dispatches on the ModR/M reg field; for mod=3 several reg values further
 * dispatch on the r/m field to individual register-form instructions
 * (VMX, MONITOR/MWAIT, XGETBV/XSETBV, SVM, SWAPGS/RDTSCP). */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* memory: SGDT; register: VMX instructions. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* memory: SIDT; register: MONITOR/MWAIT. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* memory: LGDT; register: XGETBV/XSETBV. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* memory: LIDT; register: AMD SVM instructions (all eight r/m values). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* all r/m values handled above */
            }

        case 4: /* SMSW - both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5: /* reserved. */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* LMSW - both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* memory: INVLPG; register: SWAPGS/RDTSCP. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1297
/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03).
 * Loads the access rights (LAR) or segment limit (LSL) of the selector in
 * Ew into Gv; the heavy lifting is in iemCImpl_LarLsl_u16/u64.
 *
 * @param   fIsLar  true for LAR, false for LSL.
 * @note    32-bit and 64-bit operand sizes share the u64 path below.
 * @note    Doc comment says /3 but this is the 0x02/0x03 helper; original
 *          comment kept as-is upstream — presumably a copy/paste slip. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* \#UD in real and V86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source selector. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                /* Both wide sizes use the 64-bit implementation. */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source selector: always a 16-bit read. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1391
1392
1393
/** Opcode 0x0f 0x02 (LAR): thin wrapper over the common LAR/LSL worker. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true /*fIsLar*/);
}
1400
1401
/** Opcode 0x0f 0x03 (LSL): thin wrapper over the common LAR/LSL worker. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false /*fIsLar*/);
}
1408
1409
/** Opcode 0x0f 0x05 (SYSCALL): deferred entirely to iemCImpl_syscall. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1417
1418
/** Opcode 0x0f 0x06 (CLTS): deferred entirely to iemCImpl_clts. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1426
1427
/** Opcode 0x0f 0x07 (SYSRET): deferred entirely to iemCImpl_sysret. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1435
1436
/** Opcode 0x0f 0x08 (INVD): not implemented yet (stub). */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();
1440
1441
/** Opcode 0x0f 0x09 (WBINVD).
 * Raises \#GP(0) when CPL != 0, otherwise treated as a NOP since IEM does not
 * model caches. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();            /* Instruction requires at least a 486. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1454
1455
/** Opcode 0x0f 0x0b (UD2): architecturally defined to raise \#UD. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1462
/** Opcode 0x0f 0x0d (group P, AMD 3DNow! PREFETCH/PREFETCHW).
 * Requires the 3DNow!-prefetch feature and a memory operand; the prefetch
 * itself is currently emulated as a NOP. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms are invalid. */
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the effective address to consume the displacement bytes, then NOP. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1503
1504
/** Opcode 0x0f 0x0e (FEMMS, AMD 3DNow!): not implemented yet (stub). */
FNIEMOP_STUB(iemOp_femms);
1507
1508
/*
 * AMD 3DNow! instructions (0x0f 0x0f with an immediate suffix byte selecting
 * the operation, see iemOp_3Dnow below).  All stubs for now.
 * NB: the _PQ_ vs _Pq_ capitalization is inconsistent in a few names but
 * matches the dispatcher in iemOp_3Dnow.
 */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1580
1581
/** Opcode 0x0f 0x0f (3DNow! escape).
 * The operation is selected by the byte FOLLOWING the ModR/M encoding;
 * fetches it here and dispatches.  Raises \#UD when the guest CPU profile
 * lacks 3DNow!. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1623
1624
/* 0x0f 0x10 family (load forms) -- all stubs for now. */

/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
1633
1634
/** Opcode 0x0f 0x11 - vmovups Wps, Vps
 * Stores an XMM register to another XMM register or to unaligned memory. */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Destination is r/m, source is reg (store direction). */
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t,                 uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        /* No alignment check: movups tolerates unaligned memory. */
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1676
1677
/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd (stub) */
FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);

/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss (stub) */
FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
1683
/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd
 * Stores the low 64 bits of an XMM register to another XMM register's low
 * quadword or to memory.  The 0xf2 prefix is the mandatory prefix here, so
 * only the lock prefix is rejected. */
FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
{
    IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t,                  uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1728
1729
/* 0x0f 0x12 / 0x13 family -- mostly stubs for now. */

/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT

/** Opcode 0x66 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT

/** Opcode 0xf3 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT

/** Opcode 0xf2 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT

/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
1744
/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq
 * Stores the low quadword of an XMM register to memory.  The register form
 * is invalid (the #if 0 block keeps an implementation sketch around). */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        /* Register form is invalid for movlpd. */
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1791
/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/* 0x0f 0x14..0x17 family -- stubs and invalid encodings. */

/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq);  //NEXT
/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq);  //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */
1820
1821
/** Opcode 0x0f 0x18 (group 16, PREFETCHh).
 * Memory forms only; all hints are currently emulated as NOPs.  Register
 * forms raise \#UD. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        /* Decode the effective address to consume the displacement bytes, then NOP. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
1854
1855
/** Opcode 0x0f 0x19..0x1f (multi-byte NOP Ev).
 * Consumes the ModR/M operand (including any displacement) and does nothing. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: plain NOP. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory form: decode the address to consume the bytes, then NOP. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1881
1882
/** Opcode 0x0f 0x20 (MOV Rd,Cd - read control register).
 * Only CR0/2/3/4/8 are valid; on CPUs with the alternative CR8 encoding a
 * LOCK prefix selects CR8.  Deferred to iemCImpl_mov_Rd_Cd. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();            /* Instruction requires at least a 386. */
    /* Operand size is fixed: 64-bit in long mode, otherwise 32-bit. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
1914
1915
/** Opcode 0x0f 0x21 (MOV Rd,Dd - read debug register).
 * REX.R would address DR8+ which do not exist, hence \#UD.  Deferred to
 * iemCImpl_mov_Rd_Dd. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();            /* Instruction requires at least a 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1929
1930
/** Opcode 0x0f 0x22 (MOV Cd,Rd - write control register).
 * Mirror image of 0x0f 0x20 above; deferred to iemCImpl_mov_Cd_Rd. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();            /* Instruction requires at least a 386. */
    /* Operand size is fixed: 64-bit in long mode, otherwise 32-bit. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
1962
1963
/** Opcode 0x0f 0x23 (MOV Dd,Rd - write debug register).
 * REX.R would address DR8+ which do not exist, hence \#UD.  Deferred to
 * iemCImpl_mov_Dd_Rd. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();            /* Instruction requires at least a 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
1977
1978
/** Opcode 0x0f 0x24 (MOV Rd,Td - read test register).
 * Test registers only existed on 386/486; emulated as \#UD here. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1987
1988
/** Opcode 0x0f 0x26 (MOV Td,Rd - write test register).
 * Test registers only existed on 386/486; emulated as \#UD here. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1997
1998
/** Opcode 0x0f 0x28 - vmovaps Vps, Wps
 * Loads an aligned 128-bit value from an XMM register or memory into an XMM
 * register; the aligned memory fetch raises \#GP on misalignment. */
FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Destination is reg, source is r/m (load direction). */
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t,                 uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Alignment-checking fetch: movaps requires 16-byte alignment. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2040
/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd (load form: xmm <- xmm/m128).
 * Identical to the 0x0f 0x28 worker above except that SSE2 availability is
 * checked instead of SSE. */
FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register: straight 128-bit register copy.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: 16-byte aligned fetch (MOVAPD #GPs on misalignment).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2082
2083/* Opcode 0xf3 0x0f 0x28 - invalid */
2084/* Opcode 0xf2 0x0f 0x28 - invalid */
2085
/** Opcode 0x0f 0x29 - movaps Wps,Vps / movapd Wpd,Vpd (store forms; the
 * operand-size prefix selects movapd). */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
{
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
        IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
    else
        IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register: 128-bit copy, destination is r/m here.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register: aligned 128-bit store (only reads XMM state).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2136
2137
2138/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2139FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2140/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2141FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2142/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
2143FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
2144/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2145FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
2146
2147
/** Opcode 0x0f 0x2b - movntps Mps,Vps / movntpd Mpd,Vpd (non-temporal aligned
 * 128-bit store; only the memory form is valid). */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
{
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
        IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
    else
        IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* NOTE(review): only reads the XMM register; _FOR_READ (as used by the
           0x0f 0x29 memory path) looks sufficient here - confirm before changing. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
2184
2185
2186/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2187FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2188/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2189FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2190/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
2191FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2192/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
2193FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2194
2195/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2196FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2197/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2198FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2199/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
2200FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2201/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
2202FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2203
2204/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
2205FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
2206/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
2207FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
2208/* Opcode 0xf3 0x0f 0x2e - invalid */
2209/* Opcode 0xf2 0x0f 0x2e - invalid */
2210
2211/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
2212FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
2213/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
2214FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
2215/* Opcode 0xf3 0x0f 0x2f - invalid */
2216/* Opcode 0xf2 0x0f 0x2f - invalid */
2217
/** Opcode 0x0f 0x30 - wrmsr (write ECX-selected MSR; deferred to C impl). */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
2225
2226
/** Opcode 0x0f 0x31 - rdtsc (read time-stamp counter; deferred to C impl). */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
2234
2235
/** Opcode 0x0f 0x32 - rdmsr (read ECX-selected MSR; deferred to C impl).
 * Note: the comment previously said 0x0f 0x33, but RDMSR is 0F 32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
2243
2244
/** Opcode 0x0f 0x33 - rdpmc (was mislabeled 0x34; RDPMC is 0F 33). */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2257
2258
/**
 * Implements a CMOVcc instruction (register and memory source forms).
 *
 * Note that the memory operand is always fetched, whether the condition holds
 * or not, and that for 32-bit operands the high half of the destination is
 * cleared even when the move itself is suppressed - both matching real
 * hardware.  Wish there was an obvious way to do this where we could share
 * and reduce code bloat.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
2267#define CMOV_X(a_Cnd) \
2268 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2269 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2270 { \
2271 switch (pVCpu->iem.s.enmEffOpSize) \
2272 { \
2273 case IEMMODE_16BIT: \
2274 IEM_MC_BEGIN(0, 1); \
2275 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2276 a_Cnd { \
2277 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2278 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2279 } IEM_MC_ENDIF(); \
2280 IEM_MC_ADVANCE_RIP(); \
2281 IEM_MC_END(); \
2282 return VINF_SUCCESS; \
2283 \
2284 case IEMMODE_32BIT: \
2285 IEM_MC_BEGIN(0, 1); \
2286 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2287 a_Cnd { \
2288 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2289 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2290 } IEM_MC_ELSE() { \
2291 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2292 } IEM_MC_ENDIF(); \
2293 IEM_MC_ADVANCE_RIP(); \
2294 IEM_MC_END(); \
2295 return VINF_SUCCESS; \
2296 \
2297 case IEMMODE_64BIT: \
2298 IEM_MC_BEGIN(0, 1); \
2299 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2300 a_Cnd { \
2301 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2302 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2303 } IEM_MC_ENDIF(); \
2304 IEM_MC_ADVANCE_RIP(); \
2305 IEM_MC_END(); \
2306 return VINF_SUCCESS; \
2307 \
2308 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2309 } \
2310 } \
2311 else \
2312 { \
2313 switch (pVCpu->iem.s.enmEffOpSize) \
2314 { \
2315 case IEMMODE_16BIT: \
2316 IEM_MC_BEGIN(0, 2); \
2317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2318 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2320 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2321 a_Cnd { \
2322 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2323 } IEM_MC_ENDIF(); \
2324 IEM_MC_ADVANCE_RIP(); \
2325 IEM_MC_END(); \
2326 return VINF_SUCCESS; \
2327 \
2328 case IEMMODE_32BIT: \
2329 IEM_MC_BEGIN(0, 2); \
2330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2331 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2333 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2334 a_Cnd { \
2335 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2336 } IEM_MC_ELSE() { \
2337 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2338 } IEM_MC_ENDIF(); \
2339 IEM_MC_ADVANCE_RIP(); \
2340 IEM_MC_END(); \
2341 return VINF_SUCCESS; \
2342 \
2343 case IEMMODE_64BIT: \
2344 IEM_MC_BEGIN(0, 2); \
2345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2346 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2348 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2349 a_Cnd { \
2350 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2351 } IEM_MC_ENDIF(); \
2352 IEM_MC_ADVANCE_RIP(); \
2353 IEM_MC_END(); \
2354 return VINF_SUCCESS; \
2355 \
2356 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2357 } \
2358 } do {} while (0)
2359
2360
2361
/** Opcode 0x0f 0x40 - cmovo Gv,Ev (move if OF=1). */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}
2368
2369
/** Opcode 0x0f 0x41 - cmovno Gv,Ev (move if OF=0). */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}
2376
2377
/** Opcode 0x0f 0x42 - cmovc/cmovb Gv,Ev (move if CF=1). */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}
2384
2385
/** Opcode 0x0f 0x43 - cmovnc/cmovae Gv,Ev (move if CF=0). */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}
2392
2393
/** Opcode 0x0f 0x44 - cmove/cmovz Gv,Ev (move if ZF=1). */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}
2400
2401
/** Opcode 0x0f 0x45 - cmovne/cmovnz Gv,Ev (move if ZF=0). */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}
2408
2409
/** Opcode 0x0f 0x46 - cmovbe/cmovna Gv,Ev (move if CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
2416
2417
/** Opcode 0x0f 0x47 - cmovnbe/cmova Gv,Ev (move if CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
2424
2425
/** Opcode 0x0f 0x48 - cmovs Gv,Ev (move if SF=1). */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}
2432
2433
/** Opcode 0x0f 0x49 - cmovns Gv,Ev (move if SF=0). */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}
2440
2441
/** Opcode 0x0f 0x4a - cmovp/cmovpe Gv,Ev (move if PF=1). */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}
2448
2449
/** Opcode 0x0f 0x4b - cmovnp/cmovpo Gv,Ev (move if PF=0). */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}
2456
2457
/** Opcode 0x0f 0x4c - cmovl/cmovnge Gv,Ev (move if SF != OF). */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}
2464
2465
/** Opcode 0x0f 0x4d - cmovnl/cmovge Gv,Ev (move if SF == OF). */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}
2472
2473
/** Opcode 0x0f 0x4e - cmovle/cmovng Gv,Ev (move if ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
2480
2481
/** Opcode 0x0f 0x4f - cmovnle/cmovg Gv,Ev (move if ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
2488
2489#undef CMOV_X
2490
2491/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
2492FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
2493/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
2494FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
2495/* Opcode 0xf3 0x0f 0x50 - invalid */
2496/* Opcode 0xf2 0x0f 0x50 - invalid */
2497
2498/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
2499FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2500/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
2501FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2502/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
2503FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2504/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2505FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2506
2507/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
2508FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2509/* Opcode 0x66 0x0f 0x52 - invalid */
2510/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
2511FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2512/* Opcode 0xf2 0x0f 0x52 - invalid */
2513
2514/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
2515FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2516/* Opcode 0x66 0x0f 0x53 - invalid */
2517/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
2518FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2519/* Opcode 0xf2 0x0f 0x53 - invalid */
2520
2521/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
2522FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
2523/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
2524FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
2525/* Opcode 0xf3 0x0f 0x54 - invalid */
2526/* Opcode 0xf2 0x0f 0x54 - invalid */
2527
2528/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
2529FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
2530/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
2531FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
2532/* Opcode 0xf3 0x0f 0x55 - invalid */
2533/* Opcode 0xf2 0x0f 0x55 - invalid */
2534
2535/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
2536FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
2537/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
2538FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
2539/* Opcode 0xf3 0x0f 0x56 - invalid */
2540/* Opcode 0xf2 0x0f 0x56 - invalid */
2541
2542/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
2543FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
2544/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
2545FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
2546/* Opcode 0xf3 0x0f 0x57 - invalid */
2547/* Opcode 0xf2 0x0f 0x57 - invalid */
2548
2549/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2550FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2551/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2552FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2553/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2554FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2555/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2556FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2557
2558/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2559FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2560/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2561FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2562/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2563FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2564/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2565FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2566
2567/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2568FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2569/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2570FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2571/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2572FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2573/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2574FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2575
2576/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2577FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2578/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2579FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2580/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2581FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2582/* Opcode 0xf2 0x0f 0x5b - invalid */
2583
2584/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2585FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2586/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2587FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2588/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2589FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2590/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2591FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2592
2593/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2594FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2595/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2596FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2597/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2598FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2599/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2600FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2601
2602/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2603FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2604/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2605FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2606/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2607FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2608/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2609FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2610
2611/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2612FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2613/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2614FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2615/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2616FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2617/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2618FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2619
/**
 * Common worker for 128-bit media instructions of the form:
 *     pxxxx xmm1, xmm2/mem64
 *
 * Despite the "Mmx" in the name, this body operates on XMM registers, raises
 * SSE2 related exceptions and calls pImpl->pfnU128: the 2nd operand is the
 * low quadword of an XMM register, or a 64-bit memory operand that is checked
 * against 128-bit alignment.
 *
 * NOTE(review): the bodies of iemOpCommonMmx_LowLow_To_Full and
 * iemOpCommonSse_LowLow_To_Full appear swapped relative to their names
 * (compare with the consistently paired *_HighHigh_To_Full workers below).
 * Callers must pick the worker whose BODY provides the wanted semantics.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2675
2676
/**
 * Common worker for 64-bit MMX media instructions of the form:
 *     pxxxx mm1, mm2/mem32
 *
 * Despite the "Sse" in the name, this body operates on MMX registers, raises
 * MMX related exceptions and calls pImpl->pfnU64: the 2nd operand is the low
 * doubleword of an MMX register, or a 32-bit memory operand.  Raises \#UD up
 * front when the operation has no 64-bit implementation (pfnU64 is NULL).
 *
 * NOTE(review): body/name appear swapped with iemOpCommonMmx_LowLow_To_Full
 * above - callers must pick the worker whose BODY provides the wanted
 * semantics.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!pImpl->pfnU64)
        return IEMOP_RAISE_INVALID_OPCODE();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint32_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2736
2737
2738/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2739FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2740{
2741 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2742 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2743}
2744
/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, W (SSE2 form: interleave low bytes). */
FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
    /* Note: the SSE (XREG/pfnU128) implementation currently lives in the worker
       named iemOpCommonMmx_LowLow_To_Full (the two LowLow worker bodies are
       swapped relative to their names), so this routing is behaviorally correct. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2751
2752/* Opcode 0xf3 0x0f 0x60 - invalid */
2753
2754
2755/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2756FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2757{
2758 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2759 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2760}
2761
2762/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2763FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2764{
2765 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2766 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2767}
2768
2769/* Opcode 0xf3 0x0f 0x61 - invalid */
2770
2771
2772/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2773FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2774{
2775 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2776 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2777}
2778
2779/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2780FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2781{
2782 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2783 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2784}
2785
2786/* Opcode 0xf3 0x0f 0x62 - invalid */
2787
2788
2789
2790/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2791FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2792/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2793FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2794/* Opcode 0xf3 0x0f 0x63 - invalid */
2795
2796/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2797FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2798/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2799FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2800/* Opcode 0xf3 0x0f 0x64 - invalid */
2801
2802/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2803FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2804/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2805FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2806/* Opcode 0xf3 0x0f 0x65 - invalid */
2807
2808/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2809FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2810/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2811FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2812/* Opcode 0xf3 0x0f 0x66 - invalid */
2813
2814/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2815FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2816/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
2817FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2818/* Opcode 0xf3 0x0f 0x67 - invalid */
2819
2820
/**
 * Common worker for MMX instructions on the form:
 *     pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second (high) half of a register; the memory form is
 * a plain 64-bit read with no alignment requirement.  Asserts that the
 * operation has a 64-bit implementation (pfnU64), raising \#UD otherwise.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2879
2880
/**
 * Common worker for SSE2 instructions on the form:
 *     pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second (high) half of a register; the memory form
 * performs a 128-bit aligned access of which the implementation may use the
 * full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_ARG(uint128_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_LOCAL(uint128_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2936
2937
/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd (MMX form: interleave high bytes). */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2944
/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
{
    /* 0x66-prefixed form: same operation on the full 128-bit XMM registers. */
    IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2951/* Opcode 0xf3 0x0f 0x68 - invalid */
2952
2953
/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
{
    /* MMX form: delegates to the common high-half worker. */
    IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2960
2961/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2962FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2963{
2964 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2965 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2966
2967}
2968/* Opcode 0xf3 0x0f 0x69 - invalid */
2969
2970
/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
{
    /* MMX form: delegates to the common high-half worker. */
    IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2977
/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
{
    /* 0x66-prefixed form: same operation on the full 128-bit XMM registers. */
    IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2984/* Opcode 0xf3 0x0f 0x6a - invalid */
2985
2986
/* Opcode 0x0f 0x6b: packssdw -- both the MMX and 0x66/SSE2 forms are still
   stubs (will assert/fail when hit). */
/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x6b - invalid */
2992
2993
2994/* Opcode 0x0f 0x6c - invalid */
2995
/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
{
    /* SSE2 only (no MMX form for 0x6c): interleaves the low qwords via the
       common low-half worker. */
    IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
3002
3003/* Opcode 0xf3 0x0f 0x6c - invalid */
3004/* Opcode 0xf2 0x0f 0x6c - invalid */
3005
3006
3007/* Opcode 0x0f 0x6d - invalid */
3008
3009/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
3010FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3011{
3012 IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq");
3013 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3014}
3015
3016/* Opcode 0xf3 0x0f 0x6d - invalid */
3017
3018
/** Opcode 0x0f 0x6e - movd/q Pd, Ey
 *
 * Moves a GPR or 32/64-bit memory operand into an MMX register.  Without
 * REX.W the 32-bit source is zero extended to 64 bits in the MMX register. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
    else
        IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* MMX, greg */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        else
            /* 32-bit source is zero extended into the 64-bit temporary. */
            IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* MMX, [mem] */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* NOTE(review): last arg to IEM_MC_CALC_RM_EFF_ADDR is 1 here but 0 in
           the otherwise-similar 0x6f handler (iemOp_movq_Pq_Qq); neither
           instruction has an immediate -- verify which value is intended. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            /* Store zero-extends the dword into the MMX register. */
            IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3069
/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey
 *
 * Moves a GPR or 32/64-bit memory operand into the low part of an XMM
 * register, zero extending the rest of the 128 bits. */
FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* NOTE(review): the mnemonic strings say "Wq,Eq"/"Wd,Ed" although the
       destination operand is V (register-only form) -- confirm intended. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(vmovdq_Wq_Eq, "vmovq Wq,Eq");
    else
        IEMOP_MNEMONIC(vmovdq_Wd_Ed, "vmovd Wd,Ed");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* XMM, greg*/
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            /* Store zero-extends the qword to the full 128-bit register. */
            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* XMM, [mem] */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3126
3127/* Opcode 0xf3 0x0f 0x6e - invalid */
3128
3129
/** Opcode 0x0f 0x6f - movq Pq, Qq
 *
 * 64-bit move from an MMX register or memory into an MMX register. */
FNIEMOP_DEF(iemOp_movq_Pq_Qq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        /* Note: no REX extension applied to MMX register indexes (see todos). */
        IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3173
/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx
 *
 * 128-bit move into an XMM register; the memory form requires 16-byte
 * alignment (enforced by the _ALIGN_SSE fetch). */
FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Aligned fetch -- raises #GP on unaligned addresses. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3214
/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx
 *
 * Like the 0x66 form (movdqa) but the memory access is unaligned
 * (plain IEM_MC_FETCH_MEM_U128, no alignment check). */
FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3255
3256
/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib
 *
 * Word shuffle of an MMX operand controlled by an immediate byte.
 * Requires SSE or the AMD MMX extensions (see the XCPT check below). */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint64_t *,                  pDst, 0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* NOTE(review): the effective address is calculated before the Ib is
           fetched, and the last argument is 0 even though one immediate byte
           follows -- verify against the RIP-relative addressing handling. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3309
/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib
 *
 * Dword shuffle of a 128-bit operand controlled by an immediate byte. */
FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint128_t *,         pDst, 0);
        IEM_MC_ARG(uint128_t const *,   pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint128_t *,                 pDst, 0);
        IEM_MC_LOCAL(uint128_t,                 uSrc);
        IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* Effective address first, then the trailing Ib (same pattern as the
           other 0x70 handlers). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3362
/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib
 *
 * Shuffle of the high words of a 128-bit operand; structurally identical to
 * vpshufd above, only the assembly worker differs (iemAImpl_pshufhw). */
FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint128_t *,         pDst, 0);
        IEM_MC_ARG(uint128_t const *,   pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint128_t *,                 pDst, 0);
        IEM_MC_LOCAL(uint128_t,                 uSrc);
        IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3415
/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib
 *
 * Shuffle of the low words of a 128-bit operand; structurally identical to
 * vpshufd above, only the assembly worker differs (iemAImpl_pshuflw). */
FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint128_t *,         pDst, 0);
        IEM_MC_ARG(uint128_t const *,   pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint128_t *,                 pDst, 0);
        IEM_MC_LOCAL(uint128_t,                 uSrc);
        IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3468
3469
/* Group 12 (0x0f 0x71) immediate-count word shifts; register forms only
   (the dispatcher below rejects memory operands).  All still stubs. */
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
3487
3488
/** Opcode 0x0f 0x71.
 *
 * Group 12 dispatcher: selects the worker by the ModRM reg field (shift kind)
 * and the operand-size/repeat prefixes (MMX vs SSE form).  Memory operands
 * and unassigned reg values are invalid. */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Register form only; mod != 3 means a memory operand -> #UD. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrlw */
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4: /* psraw */
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* psllw */
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3523
3524
/* Group 13 (0x0f 0x72) immediate-count dword shifts; register forms only
   (see the dispatcher below).  All still stubs. */
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3542
3543
/** Opcode 0x0f 0x72.
 *
 * Group 13 dispatcher: same structure as Grp12 (reg field selects the shift,
 * prefixes select the MMX vs SSE worker); memory operands are invalid. */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Register form only; mod != 3 means a memory operand -> #UD. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrld */
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4: /* psrad */
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* pslld */
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3578
3579
/* Group 14 (0x0f 0x73) immediate-count qword/dqword shifts; register forms
   only (see the dispatcher below).  All still stubs; the /3 and /7 byte-shift
   forms exist only with the 0x66 prefix. */
/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3597
3598
/** Opcode 0x0f 0x73.
 *
 * Group 14 dispatcher: reg field selects the shift kind; /3 (psrldq) and
 * /7 (pslldq) are only valid with the 0x66 prefix, /2 and /6 have both MMX
 * and SSE forms.  Memory operands are invalid. */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Register form only; mod != 3 means a memory operand -> #UD. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrlq */
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 3: /* psrldq - SSE only */
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* psllq */
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7: /* pslldq - SSE only */
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3638
3639
3640/**
3641 * Common worker for MMX instructions on the form:
3642 * pxxx mm1, mm2/mem64
3643 */
3644FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3645{
3646 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3647 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3648 {
3649 /*
3650 * Register, register.
3651 */
3652 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3653 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3655 IEM_MC_BEGIN(2, 0);
3656 IEM_MC_ARG(uint64_t *, pDst, 0);
3657 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3658 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3659 IEM_MC_PREPARE_FPU_USAGE();
3660 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3661 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3662 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3663 IEM_MC_ADVANCE_RIP();
3664 IEM_MC_END();
3665 }
3666 else
3667 {
3668 /*
3669 * Register, memory.
3670 */
3671 IEM_MC_BEGIN(2, 2);
3672 IEM_MC_ARG(uint64_t *, pDst, 0);
3673 IEM_MC_LOCAL(uint64_t, uSrc);
3674 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3676
3677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3679 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3680 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3681
3682 IEM_MC_PREPARE_FPU_USAGE();
3683 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3684 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3685
3686 IEM_MC_ADVANCE_RIP();
3687 IEM_MC_END();
3688 }
3689 return VINF_SUCCESS;
3690}
3691
3692
3693/**
3694 * Common worker for SSE2 instructions on the forms:
3695 * pxxx xmm1, xmm2/mem128
3696 *
3697 * Proper alignment of the 128-bit operand is enforced.
3698 * Exceptions type 4. SSE2 cpuid checks.
3699 */
3700FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3701{
3702 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3703 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3704 {
3705 /*
3706 * Register, register.
3707 */
3708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3709 IEM_MC_BEGIN(2, 0);
3710 IEM_MC_ARG(uint128_t *, pDst, 0);
3711 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3712 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3713 IEM_MC_PREPARE_SSE_USAGE();
3714 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3715 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3716 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3717 IEM_MC_ADVANCE_RIP();
3718 IEM_MC_END();
3719 }
3720 else
3721 {
3722 /*
3723 * Register, memory.
3724 */
3725 IEM_MC_BEGIN(2, 2);
3726 IEM_MC_ARG(uint128_t *, pDst, 0);
3727 IEM_MC_LOCAL(uint128_t, uSrc);
3728 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3730
3731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3733 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3734 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3735
3736 IEM_MC_PREPARE_SSE_USAGE();
3737 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3738 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3739
3740 IEM_MC_ADVANCE_RIP();
3741 IEM_MC_END();
3742 }
3743 return VINF_SUCCESS;
3744}
3745
3746
/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    /* MMX byte compare-for-equal via the common full/full worker. */
    IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
3753
/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
{
    /* SSE2 byte compare-for-equal via the common full/full worker. */
    IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
3760
3761/* Opcode 0xf3 0x0f 0x74 - invalid */
3762/* Opcode 0xf2 0x0f 0x74 - invalid */
3763
3764
/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    /* MMX word compare-for-equal via the common full/full worker. */
    IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
3771
/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
{
    /* SSE2 word compare-for-equal via the common full/full worker. */
    IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
3778
3779/* Opcode 0xf3 0x0f 0x75 - invalid */
3780/* Opcode 0xf2 0x0f 0x75 - invalid */
3781
3782
/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    /* MMX dword compare-for-equal via the common full/full worker. */
    IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
3789
/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
{
    /* SSE2 dword compare-for-equal via the common full/full worker. */
    IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
3796
3797/* Opcode 0xf3 0x0f 0x76 - invalid */
3798/* Opcode 0xf2 0x0f 0x76 - invalid */
3799
3800
/* Opcodes 0x0f 0x77 .. 0x7d: emms, VMX read/write, AMD group 17 and the
   SSE3 horizontal add/sub forms -- all unimplemented stubs here; the
   remaining prefix combinations are invalid. */
/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
/* Opcode 0x66 0x0f 0x77 - invalid */
/* Opcode 0xf3 0x0f 0x77 - invalid */
/* Opcode 0xf2 0x0f 0x77 - invalid */

/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17);
/* Opcode 0xf3 0x0f 0x78 - invalid */
/* Opcode 0xf2 0x0f 0x78 - invalid */

/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
/* Opcode 0x66 0x0f 0x79 - invalid */
/* Opcode 0xf3 0x0f 0x79 - invalid */
/* Opcode 0xf2 0x0f 0x79 - invalid */

/* Opcode 0x0f 0x7a - invalid */
/* Opcode 0x66 0x0f 0x7a - invalid */
/* Opcode 0xf3 0x0f 0x7a - invalid */
/* Opcode 0xf2 0x0f 0x7a - invalid */

/* Opcode 0x0f 0x7b - invalid */
/* Opcode 0x66 0x0f 0x7b - invalid */
/* Opcode 0xf3 0x0f 0x7b - invalid */
/* Opcode 0xf2 0x0f 0x7b - invalid */

/* Opcode 0x0f 0x7c - invalid */
/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x7c - invalid */
/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);

/* Opcode 0x0f 0x7d - invalid */
/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x7d - invalid */
/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3843
3844
/** Opcode 0x0f 0x7e - movd_q Ey, Pd
 *
 * Moves the low 32/64 bits of an MMX register into a GPR or to memory
 * (the reverse direction of 0x0f 0x6e). */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
    else
        IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* greg, MMX */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* Only reading the MMX state, so _FOR_READ (contrast 0x6e). */
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* [mem], MMX */
        IEM_MC_BEGIN(0, 2);
        /* NOTE(review): this local is named GCPtrEffSrc but actually holds the
           effective address of the store DESTINATION. */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* NOTE(review): last argument is 1 here; there is no immediate for
           this instruction -- verify against other handlers (0x6f uses 0). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3901
/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy
 *
 * MOVD/MOVQ with an XMM register as the source: stores the low 32 bits of
 * the XMM register to a GPR or memory, or (with REX.W) the low 64 bits.
 * Unlike the MMX form, the XMM register index is REX.R extended. */
FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(vmovq_Eq_Wq, "vmovq Eq,Wq");
    else
        IEMOP_MNEMONIC(vmovd_Ed_Wd, "vmovd Ed,Wd");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* greg, XMM */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* source only - SSE state is not modified */
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* [mem], XMM - store the low dword/qword of the XMM register to memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3958
/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
FNIEMOP_STUB(iemOp_vmovq_Vq_Wq); /* SSE2 load form - not implemented yet (stub). */
/* Opcode 0xf2 0x0f 0x7e - invalid */
3962
3963
/** Opcode 0x0f 0x7f.
 *
 * Prefix-dispatched store forms:
 *  - 0x66:       movdqa Wdq,Vdq (aligned 128-bit SSE store)
 *  - 0xf3:       movdqu Wdq,Vdq (unaligned 128-bit SSE store)
 *  - no prefix:  movq Qq,Pq     (64-bit MMX store)
 *  - 0xf2 or multiple of the above: invalid opcode (#UD).
 */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.  (Copy reg -> rm; the destination is
                 * the rm-encoded XMM register, hence FOR_CHANGE.)
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                                      ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.  (Store: memory is the destination, so
                 * the SSE state is only read here.)
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp); /* movdqa: #GP on misalignment */
                else
                    IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* dest MMX register is modified */
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.  (Store: memory is the destination.)
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* 0xf2 or an invalid prefix combination */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
4064
4065
4066
/** Opcode 0x0f 0x80 - jo Jv: jump near if overflow (OF=1), rel16/rel32. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Jcc defaults to 64-bit operand size in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4101
4102
/** Opcode 0x0f 0x81 - jno Jv: jump near if not overflow (OF=0).
 * Note the inverted arms: OF set falls through, the jump is in the ELSE. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4137
4138
/** Opcode 0x0f 0x82 - jc/jb/jnae Jv: jump near if carry (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4173
4174
/** Opcode 0x0f 0x83 - jnc/jnb/jae Jv: jump near if not carry (CF=0).
 * Inverted arms: CF set falls through, the jump is in the ELSE. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4209
4210
/** Opcode 0x0f 0x84 - je/jz Jv: jump near if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4245
4246
/** Opcode 0x0f 0x85 - jne/jnz Jv: jump near if not equal/not zero (ZF=0).
 * Inverted arms: ZF set falls through, the jump is in the ELSE. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4281
4282
/** Opcode 0x0f 0x86 - jbe/jna Jv: jump near if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4317
4318
/** Opcode 0x0f 0x87 - jnbe/ja Jv: jump near if above (CF=0 and ZF=0).
 * Inverted arms: CF or ZF set falls through, the jump is in the ELSE. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4353
4354
/** Opcode 0x0f 0x88 - js Jv: jump near if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4389
4390
/** Opcode 0x0f 0x89 - jns Jv: jump near if not sign (SF=0).
 * Inverted arms: SF set falls through, the jump is in the ELSE. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4425
4426
/** Opcode 0x0f 0x8a - jp/jpe Jv: jump near if parity (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4461
4462
/** Opcode 0x0f 0x8b - jnp/jpo Jv: jump near if not parity (PF=0).
 * Inverted arms: PF set falls through, the jump is in the ELSE. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4497
4498
/** Opcode 0x0f 0x8c - jl/jnge Jv: jump near if less (SF != OF, signed). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4533
4534
/** Opcode 0x0f 0x8d - jnl/jge Jv: jump near if greater or equal (SF == OF, signed).
 * Inverted arms: SF != OF falls through, the jump is in the ELSE. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4569
4570
/** Opcode 0x0f 0x8e - jle/jng Jv: jump near if less or equal (ZF=1 or SF != OF, signed). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4605
4606
/** Opcode 0x0f 0x8f - jnle/jg Jv: jump near if greater (ZF=0 and SF == OF, signed).
 * Inverted arms: ZF set or SF != OF falls through, the jump is in the ELSE. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4641
4642
/** Opcode 0x0f 0x90 - seto Eb: set byte to 1 if overflow (OF=1), else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();  /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4683
4684
/** Opcode 0x0f 0x91 - setno Eb: set byte to 1 if not overflow (OF=0), else 0.
 * The stored constants are swapped relative to seto. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4725
4726
/** Opcode 0x0f 0x92 - setc/setb/setnae Eb: set byte to 1 if carry (CF=1), else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4767
4768
/** Opcode 0x0f 0x93 - setnc/setnb/setae Eb: set byte to 1 if not carry (CF=0), else 0.
 * The stored constants are swapped relative to setc. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4809
4810
/** Opcode 0x0f 0x94 - sete/setz Eb: set byte to 1 if equal/zero (ZF=1), else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4851
4852
/** Opcode 0x0f 0x95 - setne/setnz Eb: set byte to 1 if not equal/not zero (ZF=0), else 0.
 * The stored constants are swapped relative to sete. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4893
4894
/** Opcode 0x0f 0x96 - setbe/setna Eb: set byte to 1 if below or equal (CF=1 or ZF=1), else 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4935
4936
/** Opcode 0x0f 0x97 - setnbe/seta Eb: set byte to 1 if above (CF=0 and ZF=0), else 0.
 * The stored constants are swapped relative to setbe. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4977
4978
4979/** Opcode 0x0f 0x98 - sets Eb: store 1 in the r/m8 operand if SF=1, else 0. */
4980FNIEMOP_DEF(iemOp_sets_Eb)
4981{
4982 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4983 IEMOP_HLP_MIN_386();
4984 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4985
4986 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4987 * any way. AMD says it's "unused", whatever that means. We're
4988 * ignoring for now. */
4989 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4990 {
4991 /* register target */
4992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4993 IEM_MC_BEGIN(0, 0);
4994 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4995 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4996 } IEM_MC_ELSE() {
4997 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4998 } IEM_MC_ENDIF();
4999 IEM_MC_ADVANCE_RIP();
5000 IEM_MC_END();
5001 }
5002 else
5003 {
5004 /* memory target */
5005 IEM_MC_BEGIN(0, 1);
5006 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5007 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5009 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5010 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5011 } IEM_MC_ELSE() {
5012 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5013 } IEM_MC_ENDIF();
5014 IEM_MC_ADVANCE_RIP();
5015 IEM_MC_END();
5016 }
5017 return VINF_SUCCESS;
5018}
5019
5020
5021/** Opcode 0x0f 0x99 - setns Eb: store 1 in the r/m8 operand if SF=0, else 0 (inverted select below). */
5022FNIEMOP_DEF(iemOp_setns_Eb)
5023{
5024 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5025 IEMOP_HLP_MIN_386();
5026 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5027
5028 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5029 * any way. AMD says it's "unused", whatever that means. We're
5030 * ignoring for now. */
5031 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5032 {
5033 /* register target */
5034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5035 IEM_MC_BEGIN(0, 0);
5036 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5037 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5038 } IEM_MC_ELSE() {
5039 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5040 } IEM_MC_ENDIF();
5041 IEM_MC_ADVANCE_RIP();
5042 IEM_MC_END();
5043 }
5044 else
5045 {
5046 /* memory target */
5047 IEM_MC_BEGIN(0, 1);
5048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5051 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5052 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5053 } IEM_MC_ELSE() {
5054 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5055 } IEM_MC_ENDIF();
5056 IEM_MC_ADVANCE_RIP();
5057 IEM_MC_END();
5058 }
5059 return VINF_SUCCESS;
5060}
5061
5062
5063/** Opcode 0x0f 0x9a - setp/setpe Eb: store 1 in the r/m8 operand if PF=1, else 0. */
5064FNIEMOP_DEF(iemOp_setp_Eb)
5065{
5066 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5067 IEMOP_HLP_MIN_386();
5068 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5069
5070 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5071 * any way. AMD says it's "unused", whatever that means. We're
5072 * ignoring for now. */
5073 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5074 {
5075 /* register target */
5076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5077 IEM_MC_BEGIN(0, 0);
5078 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5079 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5080 } IEM_MC_ELSE() {
5081 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5082 } IEM_MC_ENDIF();
5083 IEM_MC_ADVANCE_RIP();
5084 IEM_MC_END();
5085 }
5086 else
5087 {
5088 /* memory target */
5089 IEM_MC_BEGIN(0, 1);
5090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5093 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5094 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5095 } IEM_MC_ELSE() {
5096 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5097 } IEM_MC_ENDIF();
5098 IEM_MC_ADVANCE_RIP();
5099 IEM_MC_END();
5100 }
5101 return VINF_SUCCESS;
5102}
5103
5104
5105/** Opcode 0x0f 0x9b - setnp/setpo Eb: store 1 in the r/m8 operand if PF=0, else 0 (inverted select below). */
5106FNIEMOP_DEF(iemOp_setnp_Eb)
5107{
5108 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5109 IEMOP_HLP_MIN_386();
5110 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5111
5112 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5113 * any way. AMD says it's "unused", whatever that means. We're
5114 * ignoring for now. */
5115 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5116 {
5117 /* register target */
5118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5119 IEM_MC_BEGIN(0, 0);
5120 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5121 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5122 } IEM_MC_ELSE() {
5123 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5124 } IEM_MC_ENDIF();
5125 IEM_MC_ADVANCE_RIP();
5126 IEM_MC_END();
5127 }
5128 else
5129 {
5130 /* memory target */
5131 IEM_MC_BEGIN(0, 1);
5132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5135 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5136 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5137 } IEM_MC_ELSE() {
5138 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5139 } IEM_MC_ENDIF();
5140 IEM_MC_ADVANCE_RIP();
5141 IEM_MC_END();
5142 }
5143 return VINF_SUCCESS;
5144}
5145
5146
5147/** Opcode 0x0f 0x9c - setl/setnge Eb: store 1 in the r/m8 operand if SF != OF (signed 'less'), else 0. */
5148FNIEMOP_DEF(iemOp_setl_Eb)
5149{
5150 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5151 IEMOP_HLP_MIN_386();
5152 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5153
5154 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5155 * any way. AMD says it's "unused", whatever that means. We're
5156 * ignoring for now. */
5157 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5158 {
5159 /* register target */
5160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5161 IEM_MC_BEGIN(0, 0);
5162 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5163 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5164 } IEM_MC_ELSE() {
5165 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5166 } IEM_MC_ENDIF();
5167 IEM_MC_ADVANCE_RIP();
5168 IEM_MC_END();
5169 }
5170 else
5171 {
5172 /* memory target */
5173 IEM_MC_BEGIN(0, 1);
5174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5177 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5178 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5179 } IEM_MC_ELSE() {
5180 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5181 } IEM_MC_ENDIF();
5182 IEM_MC_ADVANCE_RIP();
5183 IEM_MC_END();
5184 }
5185 return VINF_SUCCESS;
5186}
5187
5188
5189/** Opcode 0x0f 0x9d - setnl/setge Eb: store 1 in the r/m8 operand if SF == OF (signed 'not less'), else 0 (inverted select below). */
5190FNIEMOP_DEF(iemOp_setnl_Eb)
5191{
5192 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5193 IEMOP_HLP_MIN_386();
5194 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5195
5196 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5197 * any way. AMD says it's "unused", whatever that means. We're
5198 * ignoring for now. */
5199 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5200 {
5201 /* register target */
5202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5203 IEM_MC_BEGIN(0, 0);
5204 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5205 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5206 } IEM_MC_ELSE() {
5207 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5208 } IEM_MC_ENDIF();
5209 IEM_MC_ADVANCE_RIP();
5210 IEM_MC_END();
5211 }
5212 else
5213 {
5214 /* memory target */
5215 IEM_MC_BEGIN(0, 1);
5216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5219 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5220 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5221 } IEM_MC_ELSE() {
5222 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5223 } IEM_MC_ENDIF();
5224 IEM_MC_ADVANCE_RIP();
5225 IEM_MC_END();
5226 }
5227 return VINF_SUCCESS;
5228}
5229
5230
5231/** Opcode 0x0f 0x9e - setle/setng Eb: store 1 in the r/m8 operand if ZF=1 or SF != OF (signed 'less or equal'), else 0. */
5232FNIEMOP_DEF(iemOp_setle_Eb)
5233{
5234 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5235 IEMOP_HLP_MIN_386();
5236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5237
5238 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5239 * any way. AMD says it's "unused", whatever that means. We're
5240 * ignoring for now. */
5241 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5242 {
5243 /* register target */
5244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5245 IEM_MC_BEGIN(0, 0);
5246 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5247 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5248 } IEM_MC_ELSE() {
5249 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5250 } IEM_MC_ENDIF();
5251 IEM_MC_ADVANCE_RIP();
5252 IEM_MC_END();
5253 }
5254 else
5255 {
5256 /* memory target */
5257 IEM_MC_BEGIN(0, 1);
5258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5261 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5262 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5263 } IEM_MC_ELSE() {
5264 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5265 } IEM_MC_ENDIF();
5266 IEM_MC_ADVANCE_RIP();
5267 IEM_MC_END();
5268 }
5269 return VINF_SUCCESS;
5270}
5271
5272
5273/** Opcode 0x0f 0x9f - setnle/setg Eb: store 1 in the r/m8 operand if ZF=0 and SF == OF (signed 'greater'), else 0 (inverted select below). */
5274FNIEMOP_DEF(iemOp_setnle_Eb)
5275{
5276 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5277 IEMOP_HLP_MIN_386();
5278 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5279
5280 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5281 * any way. AMD says it's "unused", whatever that means. We're
5282 * ignoring for now. */
5283 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5284 {
5285 /* register target */
5286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5287 IEM_MC_BEGIN(0, 0);
5288 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5289 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5290 } IEM_MC_ELSE() {
5291 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5292 } IEM_MC_ENDIF();
5293 IEM_MC_ADVANCE_RIP();
5294 IEM_MC_END();
5295 }
5296 else
5297 {
5298 /* memory target */
5299 IEM_MC_BEGIN(0, 1);
5300 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5303 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5304 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5305 } IEM_MC_ELSE() {
5306 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5307 } IEM_MC_ENDIF();
5308 IEM_MC_ADVANCE_RIP();
5309 IEM_MC_END();
5310 }
5311 return VINF_SUCCESS;
5312}
5313
5314
5315/**
5316 * Common 'push segment-register' helper.
 *
 * Pushes the given segment register selector according to the effective
 * operand size. Performs the done-decoding / no-LOCK check itself, so
 * callers need not (and should not rely on having to) repeat it.
 *
 * @param iReg The segment register (X86_SREG_XXX) to push.
5317 */
5318FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5319{
5320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* push cs/ss/ds/es are invalid in 64-bit mode; only fs/gs remain valid there. */
5321 if (iReg < X86_SREG_FS)
5322 IEMOP_HLP_NO_64BIT();
5323 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5324
5325 switch (pVCpu->iem.s.enmEffOpSize)
5326 {
5327 case IEMMODE_16BIT:
5328 IEM_MC_BEGIN(0, 1);
5329 IEM_MC_LOCAL(uint16_t, u16Value);
5330 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5331 IEM_MC_PUSH_U16(u16Value);
5332 IEM_MC_ADVANCE_RIP();
5333 IEM_MC_END();
5334 break;
5335
5336 case IEMMODE_32BIT:
5337 IEM_MC_BEGIN(0, 1);
5338 IEM_MC_LOCAL(uint32_t, u32Value);
5339 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
 /* Note the dedicated _SREG push variant here rather than a plain U32 push. */
5340 IEM_MC_PUSH_U32_SREG(u32Value);
5341 IEM_MC_ADVANCE_RIP();
5342 IEM_MC_END();
5343 break;
5344
5345 case IEMMODE_64BIT:
5346 IEM_MC_BEGIN(0, 1);
5347 IEM_MC_LOCAL(uint64_t, u64Value);
5348 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5349 IEM_MC_PUSH_U64(u64Value);
5350 IEM_MC_ADVANCE_RIP();
5351 IEM_MC_END();
5352 break;
5353 }
5354
5355 return VINF_SUCCESS;
5356}
5357
5358
5359/** Opcode 0x0f 0xa0. */
5360FNIEMOP_DEF(iemOp_push_fs)
5361{
5362 IEMOP_MNEMONIC(push_fs, "push fs");
5363 IEMOP_HLP_MIN_386();
5364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5365 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5366}
5367
5368
5369/** Opcode 0x0f 0xa1 - pop fs. Defers to the C implementation since segment loading has complex side effects. */
5370FNIEMOP_DEF(iemOp_pop_fs)
5371{
5372 IEMOP_MNEMONIC(pop_fs, "pop fs");
5373 IEMOP_HLP_MIN_386();
5374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5375 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5376}
5377
5378
5379/** Opcode 0x0f 0xa2 - cpuid. Deferred to the C implementation. */
5380FNIEMOP_DEF(iemOp_cpuid)
5381{
5382 IEMOP_MNEMONIC(cpuid, "cpuid");
5383 IEMOP_HLP_MIN_486(); /* not all 486es. */
5384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5385 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5386}
5387
5388
5389/**
5390 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5391 * iemOp_bts_Ev_Gv.
 *
 * Decodes BT/BTS/BTR/BTC Ev,Gv and dispatches to the assembly worker in
 * @a pImpl. For a register destination the bit offset is masked to the
 * operand width; for a memory destination the bit offset first selects the
 * word/dword/qword the bit lives in (address adjustment below) before the
 * in-unit offset is masked.
 *
 * @param pImpl Size-variant function table; pfnLockedU16 is NULL for BT
 * (read-only), non-NULL for the writing variants.
5392 */
5393FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5394{
5395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5396 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5397
5398 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5399 {
5400 /* register destination. */
5401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5402 switch (pVCpu->iem.s.enmEffOpSize)
5403 {
5404 case IEMMODE_16BIT:
5405 IEM_MC_BEGIN(3, 0);
5406 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5407 IEM_MC_ARG(uint16_t, u16Src, 1);
5408 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5409
5410 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
 /* Register form: bit offset is taken modulo the operand width. */
5411 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5412 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5413 IEM_MC_REF_EFLAGS(pEFlags);
5414 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5415
5416 IEM_MC_ADVANCE_RIP();
5417 IEM_MC_END();
5418 return VINF_SUCCESS;
5419
5420 case IEMMODE_32BIT:
5421 IEM_MC_BEGIN(3, 0);
5422 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5423 IEM_MC_ARG(uint32_t, u32Src, 1);
5424 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5425
5426 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5427 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5428 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5429 IEM_MC_REF_EFLAGS(pEFlags);
5430 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5431
5432 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5433 IEM_MC_ADVANCE_RIP();
5434 IEM_MC_END();
5435 return VINF_SUCCESS;
5436
5437 case IEMMODE_64BIT:
5438 IEM_MC_BEGIN(3, 0);
5439 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5440 IEM_MC_ARG(uint64_t, u64Src, 1);
5441 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5442
5443 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5444 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5445 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5446 IEM_MC_REF_EFLAGS(pEFlags);
5447 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5448
5449 IEM_MC_ADVANCE_RIP();
5450 IEM_MC_END();
5451 return VINF_SUCCESS;
5452
5453 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5454 }
5455 }
5456 else
5457 {
5458 /* memory destination. */
5459
 /* BT has no locked variant (read-only), so map the memory R/O in that case. */
5460 uint32_t fAccess;
5461 if (pImpl->pfnLockedU16)
5462 fAccess = IEM_ACCESS_DATA_RW;
5463 else /* BT */
5464 fAccess = IEM_ACCESS_DATA_R;
5465
5466 /** @todo test negative bit offsets! */
5467 switch (pVCpu->iem.s.enmEffOpSize)
5468 {
5469 case IEMMODE_16BIT:
5470 IEM_MC_BEGIN(3, 2);
5471 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5472 IEM_MC_ARG(uint16_t, u16Src, 1);
5473 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5475 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5476
5477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5478 if (pImpl->pfnLockedU16)
5479 IEMOP_HLP_DONE_DECODING();
5480 else
5481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5482 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
 /* Memory form: signed bit offset selects the containing word...
 (i16AddrAdj = (offset >> 4) * 2 bytes), then the in-word offset is masked. */
5483 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5484 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5485 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5486 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5487 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5488 IEM_MC_FETCH_EFLAGS(EFlags);
5489
5490 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5491 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5492 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5493 else
5494 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5495 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5496
5497 IEM_MC_COMMIT_EFLAGS(EFlags);
5498 IEM_MC_ADVANCE_RIP();
5499 IEM_MC_END();
5500 return VINF_SUCCESS;
5501
5502 case IEMMODE_32BIT:
5503 IEM_MC_BEGIN(3, 2);
5504 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5505 IEM_MC_ARG(uint32_t, u32Src, 1);
5506 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5508 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5509
5510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5511 if (pImpl->pfnLockedU16)
5512 IEMOP_HLP_DONE_DECODING();
5513 else
5514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5515 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5516 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5517 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5518 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5519 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5520 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5521 IEM_MC_FETCH_EFLAGS(EFlags);
5522
5523 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5524 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5525 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5526 else
5527 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5528 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5529
5530 IEM_MC_COMMIT_EFLAGS(EFlags);
5531 IEM_MC_ADVANCE_RIP();
5532 IEM_MC_END();
5533 return VINF_SUCCESS;
5534
5535 case IEMMODE_64BIT:
5536 IEM_MC_BEGIN(3, 2);
5537 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5538 IEM_MC_ARG(uint64_t, u64Src, 1);
5539 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5541 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5542
5543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5544 if (pImpl->pfnLockedU16)
5545 IEMOP_HLP_DONE_DECODING();
5546 else
5547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5548 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5549 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5550 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5551 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5552 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5553 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5554 IEM_MC_FETCH_EFLAGS(EFlags);
5555
5556 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5557 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5558 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5559 else
5560 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5561 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5562
5563 IEM_MC_COMMIT_EFLAGS(EFlags);
5564 IEM_MC_ADVANCE_RIP();
5565 IEM_MC_END();
5566 return VINF_SUCCESS;
5567
5568 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5569 }
5570 }
5571}
5572
5573
5574/** Opcode 0x0f 0xa3 - bt Ev,Gv (bit test, CF = selected bit; destination unmodified). */
5575FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5576{
5577 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5578 IEMOP_HLP_MIN_386();
5579 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5580}
5581
5582
5583/**
5584 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Decodes SHLD/SHRD Ev,Gv,Ib (double-precision shift with immediate count)
 * and dispatches to the size-appropriate assembly worker in @a pImpl.
 * In the memory form the immediate byte follows the ModRM displacement,
 * hence the '1' extra-byte hint passed to IEM_MC_CALC_RM_EFF_ADDR there.
 *
 * @param pImpl Size-variant function table (shld or shrd).
5585 */
5586FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5587{
5588 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5589 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5590
5591 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5592 {
 /* Register form: the immediate count directly follows the ModRM byte. */
5593 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5595
5596 switch (pVCpu->iem.s.enmEffOpSize)
5597 {
5598 case IEMMODE_16BIT:
5599 IEM_MC_BEGIN(4, 0);
5600 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5601 IEM_MC_ARG(uint16_t, u16Src, 1);
5602 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5603 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5604
5605 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5606 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5607 IEM_MC_REF_EFLAGS(pEFlags);
5608 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5609
5610 IEM_MC_ADVANCE_RIP();
5611 IEM_MC_END();
5612 return VINF_SUCCESS;
5613
5614 case IEMMODE_32BIT:
5615 IEM_MC_BEGIN(4, 0);
5616 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5617 IEM_MC_ARG(uint32_t, u32Src, 1);
5618 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5619 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5620
5621 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5622 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5623 IEM_MC_REF_EFLAGS(pEFlags);
5624 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5625
5626 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5627 IEM_MC_ADVANCE_RIP();
5628 IEM_MC_END();
5629 return VINF_SUCCESS;
5630
5631 case IEMMODE_64BIT:
5632 IEM_MC_BEGIN(4, 0);
5633 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5634 IEM_MC_ARG(uint64_t, u64Src, 1);
5635 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5636 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5637
5638 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5639 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5640 IEM_MC_REF_EFLAGS(pEFlags);
5641 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5642
5643 IEM_MC_ADVANCE_RIP();
5644 IEM_MC_END();
5645 return VINF_SUCCESS;
5646
5647 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5648 }
5649 }
5650 else
5651 {
5652 switch (pVCpu->iem.s.enmEffOpSize)
5653 {
5654 case IEMMODE_16BIT:
5655 IEM_MC_BEGIN(4, 2);
5656 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5657 IEM_MC_ARG(uint16_t, u16Src, 1);
5658 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5659 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5661
 /* The '1' tells the eff. addr. calc that one immediate byte follows. */
5662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5663 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5664 IEM_MC_ASSIGN(cShiftArg, cShift);
5665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5666 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5667 IEM_MC_FETCH_EFLAGS(EFlags);
5668 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5669 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5670
5671 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5672 IEM_MC_COMMIT_EFLAGS(EFlags);
5673 IEM_MC_ADVANCE_RIP();
5674 IEM_MC_END();
5675 return VINF_SUCCESS;
5676
5677 case IEMMODE_32BIT:
5678 IEM_MC_BEGIN(4, 2);
5679 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5680 IEM_MC_ARG(uint32_t, u32Src, 1);
5681 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5682 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5683 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5684
5685 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5686 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5687 IEM_MC_ASSIGN(cShiftArg, cShift);
5688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5689 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5690 IEM_MC_FETCH_EFLAGS(EFlags);
5691 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5692 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5693
5694 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5695 IEM_MC_COMMIT_EFLAGS(EFlags);
5696 IEM_MC_ADVANCE_RIP();
5697 IEM_MC_END();
5698 return VINF_SUCCESS;
5699
5700 case IEMMODE_64BIT:
5701 IEM_MC_BEGIN(4, 2);
5702 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5703 IEM_MC_ARG(uint64_t, u64Src, 1);
5704 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5705 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5707
5708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5709 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5710 IEM_MC_ASSIGN(cShiftArg, cShift);
5711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5712 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5713 IEM_MC_FETCH_EFLAGS(EFlags);
5714 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5715 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5716
5717 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5718 IEM_MC_COMMIT_EFLAGS(EFlags);
5719 IEM_MC_ADVANCE_RIP();
5720 IEM_MC_END();
5721 return VINF_SUCCESS;
5722
5723 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5724 }
5725 }
5726}
5727
5728
5729/**
5730 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Decodes SHLD/SHRD Ev,Gv,CL (double-precision shift with the count taken
 * from the CL register) and dispatches to the size-appropriate assembly
 * worker in @a pImpl.
 *
 * @param pImpl Size-variant function table (shld or shrd).
5731 */
5732FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5733{
5734 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5735 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5736
5737 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5738 {
5739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5740
5741 switch (pVCpu->iem.s.enmEffOpSize)
5742 {
5743 case IEMMODE_16BIT:
5744 IEM_MC_BEGIN(4, 0);
5745 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5746 IEM_MC_ARG(uint16_t, u16Src, 1);
5747 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5748 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5749
5750 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5751 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
 /* Shift count comes from CL (low byte of rCX). */
5752 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5753 IEM_MC_REF_EFLAGS(pEFlags);
5754 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5755
5756 IEM_MC_ADVANCE_RIP();
5757 IEM_MC_END();
5758 return VINF_SUCCESS;
5759
5760 case IEMMODE_32BIT:
5761 IEM_MC_BEGIN(4, 0);
5762 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5763 IEM_MC_ARG(uint32_t, u32Src, 1);
5764 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5765 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5766
5767 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5768 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5769 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5770 IEM_MC_REF_EFLAGS(pEFlags);
5771 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5772
5773 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5774 IEM_MC_ADVANCE_RIP();
5775 IEM_MC_END();
5776 return VINF_SUCCESS;
5777
5778 case IEMMODE_64BIT:
5779 IEM_MC_BEGIN(4, 0);
5780 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5781 IEM_MC_ARG(uint64_t, u64Src, 1);
5782 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5783 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5784
5785 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5786 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5787 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5788 IEM_MC_REF_EFLAGS(pEFlags);
5789 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5790
5791 IEM_MC_ADVANCE_RIP();
5792 IEM_MC_END();
5793 return VINF_SUCCESS;
5794
5795 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5796 }
5797 }
5798 else
5799 {
5800 switch (pVCpu->iem.s.enmEffOpSize)
5801 {
5802 case IEMMODE_16BIT:
5803 IEM_MC_BEGIN(4, 2);
5804 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5805 IEM_MC_ARG(uint16_t, u16Src, 1);
5806 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5807 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5808 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5809
5810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5812 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5813 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5814 IEM_MC_FETCH_EFLAGS(EFlags);
5815 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5816 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5817
5818 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5819 IEM_MC_COMMIT_EFLAGS(EFlags);
5820 IEM_MC_ADVANCE_RIP();
5821 IEM_MC_END();
5822 return VINF_SUCCESS;
5823
5824 case IEMMODE_32BIT:
5825 IEM_MC_BEGIN(4, 2);
5826 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5827 IEM_MC_ARG(uint32_t, u32Src, 1);
5828 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5829 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5831
5832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5834 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5835 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5836 IEM_MC_FETCH_EFLAGS(EFlags);
5837 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5838 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5839
5840 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5841 IEM_MC_COMMIT_EFLAGS(EFlags);
5842 IEM_MC_ADVANCE_RIP();
5843 IEM_MC_END();
5844 return VINF_SUCCESS;
5845
5846 case IEMMODE_64BIT:
5847 IEM_MC_BEGIN(4, 2);
5848 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5849 IEM_MC_ARG(uint64_t, u64Src, 1);
5850 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5851 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5852 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5853
5854 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5856 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5857 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5858 IEM_MC_FETCH_EFLAGS(EFlags);
5859 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5860 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5861
5862 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5863 IEM_MC_COMMIT_EFLAGS(EFlags);
5864 IEM_MC_ADVANCE_RIP();
5865 IEM_MC_END();
5866 return VINF_SUCCESS;
5867
5868 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5869 }
5870 }
5871}
5872
5873
5874
5875/** Opcode 0x0f 0xa4 - shld Ev,Gv,Ib (double-precision shift left, immediate count). */
5876FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5877{
5878 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5879 IEMOP_HLP_MIN_386();
5880 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5881}
5882
5883
5884/** Opcode 0x0f 0xa5 - shld Ev,Gv,CL (double-precision shift left, count in CL). */
5885FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5886{
5887 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5888 IEMOP_HLP_MIN_386();
5889 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5890}
5891
5892
5893/** Opcode 0x0f 0xa8. */
5894FNIEMOP_DEF(iemOp_push_gs)
5895{
5896 IEMOP_MNEMONIC(push_gs, "push gs");
5897 IEMOP_HLP_MIN_386();
5898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5899 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5900}
5901
5902
5903/** Opcode 0x0f 0xa9 - pop gs. Defers to the C implementation since segment loading has complex side effects. */
5904FNIEMOP_DEF(iemOp_pop_gs)
5905{
5906 IEMOP_MNEMONIC(pop_gs, "pop gs");
5907 IEMOP_HLP_MIN_386();
5908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5909 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5910}
5911
5912
/** Opcode 0x0f 0xaa. RSM (resume from system management mode) - not implemented yet. */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();
5916
5917
/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    /* BTS Ev,Gv - bit test and set; the common bit-op worker handles the
       register/memory forms and the LOCK prefix. */
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386(); /* instruction introduced with the 80386 */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
5925
5926
/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    /* SHRD Ev,Gv,Ib - double precision shift right; the shift count comes
       from the trailing immediate byte, decoded by the common worker. */
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* instruction introduced with the 80386 */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
5934
5935
/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    /* SHRD Ev,Gv,CL - double precision shift right with the count in CL. */
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* instruction introduced with the 80386 */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
5943
5944
/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    /* FXSAVE m512 - save x87/MMX/SSE state to a 512-byte memory area.
       Deferred to a C implementation; raises \#UD when the guest CPU
       profile lacks FXSAVE/FXRSTOR support. */
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0); /* no immediate follows: 0 extra opcode bytes */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5963
5964
/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    /* FXRSTOR m512 - restore x87/MMX/SSE state from a 512-byte memory area.
       Mirrors iemOp_Grp15_fxsave; deferred to a C implementation and gated
       on guest FXSAVE/FXRSTOR support. */
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0); /* no immediate follows: 0 extra opcode bytes */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5983
5984
/** Opcode 0x0f 0xae mem/2. LDMXCSR - not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3. STMXCSR - not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4. XSAVE - currently stubbed to raise \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5. XRSTOR - currently stubbed to raise \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6. XSAVEOPT - currently stubbed to raise \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7. CLFLUSH - not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
6002
6003
/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    /* LFENCE - load fence.  Requires SSE2 on the guest; on hosts without
       SSE2 an alternative memory fence routine is used instead of the real
       lfence instruction. */
    RT_NOREF_PV(bRm); /* mod=3, so the r/m bits carry no operand info */
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6022
6023
/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    /* MFENCE - full memory fence.  Same guest-SSE2 gating and host fallback
       scheme as lfence above. */
    RT_NOREF_PV(bRm); /* mod=3, so the r/m bits carry no operand info */
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6042
6043
/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    /* SFENCE - store fence.  Same guest-SSE2 gating and host fallback scheme
       as lfence above.  NOTE(review): sfence itself is an SSE (not SSE2)
       instruction; the fSse2 guest check is stricter than real hardware -
       confirm this is intentional. */
    RT_NOREF_PV(bRm); /* mod=3, so the r/m bits carry no operand info */
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
6062
6063
/** Opcode 0xf3 0x0f 0xae 11b/0. RDFSBASE - currently stubbed to raise \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. RDGSBASE - currently stubbed to raise \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. WRFSBASE - currently stubbed to raise \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. WRGSBASE - currently stubbed to raise \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6075
6076
/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    /*
     * Group 15 dispatcher.  The reg field of the ModR/M byte selects the
     * instruction; memory forms (mod != 3) and register forms (mod == 3) use
     * separate tables, and the register forms additionally dispatch on the
     * prefix byte (none => fences, F3/REPZ => RD/WR FS/GS base).
     */
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register operand forms: which table applies depends on the prefixes. */
        switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* no relevant prefix: only the fence encodings are valid */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reachable: every case above returns */

            case IEM_OP_PRF_REPZ: /* F3 prefix: FS/GS base read/write encodings */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reachable: every case above returns */

            default: /* any other prefix combination is undefined */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
6136
6137
/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    /* IMUL Gv,Ev - two-operand signed multiply; handled by the generic
       reg,reg/mem binary-operator worker. */
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386(); /* two-operand IMUL introduced with the 80386 */
    /* SF/ZF/AF/PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
6146
6147
/** Opcode 0x0f 0xb0. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    /*
     * CMPXCHG Eb,Gb - compare AL with the destination; the assembly helper
     * updates the destination/AL and EFLAGS as appropriate.  The locked
     * helper variant is used when a LOCK prefix is present.
     *
     * NOTE(review): the register form uses IEMOP_HLP_DONE_DECODING rather
     * than the NO_LOCK_PREFIX variant, so LOCK with a register destination
     * is not rejected here - on real hardware that raises \#UD; confirm this
     * is handled elsewhere or intentional.
     */
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486(); /* CMPXCHG introduced with the 80486 */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map the byte read/write, run the helper on a
           local AL copy, then commit memory, EFLAGS and AL. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); /* write back possibly-updated AL */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6206
/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    /*
     * CMPXCHG Ev,Gv - compare the accumulator (AX/EAX/RAX) with the
     * destination; the assembly helper updates the destination/accumulator
     * and EFLAGS.  Locked helper variants are used under the LOCK prefix.
     *
     * NOTE(review): as with the byte form, the register case uses
     * IEMOP_HLP_DONE_DECODING (not the NO_LOCK variant), so LOCK with a
     * register destination is not rejected here - verify intentional.
     */
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486(); /* CMPXCHG introduced with the 80486 */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination - one case per effective operand size. */
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit GPR writes zero the upper halves in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* On 32-bit hosts the helper takes the source by reference. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map read/write, run the helper on a local
           accumulator copy, then commit memory, EFLAGS and the accumulator. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); /* write back possibly-updated AX */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); /* write back possibly-updated EAX */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* On 32-bit hosts the helper takes the source by reference. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); /* write back possibly-updated RAX */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6400
6401
/**
 * Common worker for LSS/LFS/LGS (and friends): loads a far pointer from
 * memory into a segment register and a general register.
 *
 * The memory operand holds the offset first, followed by the 16-bit selector
 * at offset +2/+4/+8 depending on the effective operand size.  The actual
 * segment/register load is deferred to iemCImpl_load_SReg_Greg.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte; must denote a memory operand.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2); /* selector follows the 16-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4); /* selector follows the 32-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8); /* selector follows the 64-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6463
6464
/** Opcode 0x0f 0xb2. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    /* LSS Gv,Mp - load far pointer into SS:Gv; register operands are invalid. */
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386(); /* instruction introduced with the 80386 */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* memory operand required */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
6475
6476
/** Opcode 0x0f 0xb3. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    /* BTR Ev,Gv - bit test and reset; common bit-op worker handles forms. */
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386(); /* instruction introduced with the 80386 */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
6484
6485
/** Opcode 0x0f 0xb4. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    /* LFS Gv,Mp - load far pointer into FS:Gv; register operands are invalid. */
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386(); /* instruction introduced with the 80386 */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* memory operand required */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
6496
6497
/** Opcode 0x0f 0xb5. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    /* LGS Gv,Mp - load far pointer into GS:Gv; register operands are invalid. */
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386(); /* instruction introduced with the 80386 */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* memory operand required */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
6508
6509
/** Opcode 0x0f 0xb6. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    /* MOVZX Gv,Eb - zero-extend a byte source into a 16/32/64-bit register.
       One case per effective operand size, for both register and memory
       sources. */
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386(); /* instruction introduced with the 80386 */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6603
6604
/** Opcode 0x0f 0xb7. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    /* MOVZX Gv,Ew - zero-extend a word source into a 32/64-bit register.
       A 16-bit destination would be a plain move, so only two size cases
       are needed (non-64-bit => 32-bit destination). */
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386(); /* instruction introduced with the 80386 */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6673
6674
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF); stubbed to raise \#UD. */
FNIEMOP_UD_STUB(iemOp_jmpe);
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev; not implemented yet. */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6679
6680
/** Opcode 0x0f 0xb9. */
FNIEMOP_DEF(iemOp_Grp10)
{
    /* Group 10 (UD1): architecturally defined to raise \#UD; log and raise. */
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
6687
6688
/** Opcode 0x0f 0xba. */
FNIEMOP_DEF(iemOp_Grp8)
{
    /*
     * Group 8: BT/BTS/BTR/BTC Ev,Ib.  The reg field of the ModR/M byte
     * selects the operation (0-3 are invalid); the bit offset comes from the
     * immediate byte and is masked down to the operand width, so unlike the
     * Gv forms there is no negative/out-of-range bit addressing here.
     */
    IEMOP_HLP_MIN_386(); /* instructions introduced with the 80386 */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* OF/SF/ZF/AF/PF are architecturally undefined after the bit-test ops. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1); /* bit offset modulo 16 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1); /* bit offset modulo 32 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit GPR writes zero the upper halves in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1); /* bit offset modulo 64 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT has no locked variant (pfnLockedU16 is NULL) and only reads. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f); /* bit offset modulo 16 */
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK BT is invalid */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f); /* bit offset modulo 32 */
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK BT is invalid */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f); /* bit offset modulo 64 */
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK BT is invalid */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
6860
6861
/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    /* BTC Ev,Gv - bit test and complement (386+).  Operand decoding and the
       register/memory split are handled by the common bit-op worker; only the
       btc implementation table is supplied here. */
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
6869
6870
/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    /* BSF Gv,Ev - bit scan forward (386+).  OF/SF/AF/PF/CF are declared
       undefined for the verifier since real CPUs differ here. */
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
6879
6880
/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev); /* not implemented yet */
6883
6884
/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    /* BSR Gv,Ev - bit scan reverse (386+).  Like BSF, the OF/SF/AF/PF/CF
       flags are declared undefined for verification purposes. */
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
6893
6894
/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev); /* not implemented yet */
6897
6898
/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    /* MOVSX Gv,Eb - sign-extend a byte register or memory operand into a
       16/32/64-bit general register, selected by the effective operand
       size (386+). */
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6992
6993
/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    /* MOVSX Gv,Ew - sign-extend a word register or memory operand into a
       32/64-bit general register (386+).  Only the 64-bit operand size is
       distinguished; all other sizes take the 32-bit path. */
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
7062
7063
7064/** Opcode 0x0f 0xc0. */
7065FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7066{
7067 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7068 IEMOP_HLP_MIN_486();
7069 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7070
7071 /*
7072 * If rm is denoting a register, no more instruction bytes.
7073 */
7074 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7075 {
7076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7077
7078 IEM_MC_BEGIN(3, 0);
7079 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7080 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7081 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7082
7083 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7084 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7085 IEM_MC_REF_EFLAGS(pEFlags);
7086 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7087
7088 IEM_MC_ADVANCE_RIP();
7089 IEM_MC_END();
7090 }
7091 else
7092 {
7093 /*
7094 * We're accessing memory.
7095 */
7096 IEM_MC_BEGIN(3, 3);
7097 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7098 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7099 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7100 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7102
7103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7104 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7105 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7106 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7107 IEM_MC_FETCH_EFLAGS(EFlags);
7108 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7109 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7110 else
7111 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7112
7113 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7114 IEM_MC_COMMIT_EFLAGS(EFlags);
7115 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7116 IEM_MC_ADVANCE_RIP();
7117 IEM_MC_END();
7118 return VINF_SUCCESS;
7119 }
7120 return VINF_SUCCESS;
7121}
7122
7123
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    /* XADD Ev,Gv - exchange and add (486+) for 16/32/64-bit operands.  The
       sum is written to Ev; the memory form writes the original destination
       value back to Gv via a register copy and honours the LOCK prefix. */
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit writes clear the high dword of both registers. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7276
7277
/* 0x0f 0xc2 packed/scalar compare-with-immediate forms: not implemented yet. */
/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7286
7287
/** Opcode 0x0f 0xc3. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    /* MOVNTI My,Gy - non-temporal store of a 32/64-bit general register to
       memory; emulated as a plain store.  Requires SSE2, raises #UD
       otherwise, and only the register -> memory form is valid. */
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* The SSE2 check is done after decoding completes. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0x66 0x0f 0xc3 - invalid */
/* Opcode 0xf3 0x0f 0xc3 - invalid */
/* Opcode 0xf2 0x0f 0xc3 - invalid */

/* 0x0f 0xc4 - 0x0f 0xc6 media insert/extract/shuffle forms: stubs below. */
/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
/* Opcode 0xf3 0x0f 0xc4 - invalid */
/* Opcode 0xf2 0x0f 0xc4 - invalid */

/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
/* Opcode 0xf3 0x0f 0xc5 - invalid */
/* Opcode 0xf2 0x0f 0xc5 - invalid */

/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
/* Opcode 0xf3 0x0f 0xc6 - invalid */
/* Opcode 0xf2 0x0f 0xc6 - invalid */
7366
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    /* CMPXCHG8B Mq - the compare/exchange itself is done by the assembly
       helper on the mapped memory qword, using EDX:EAX and ECX:EBX pairs;
       honours the LOCK prefix.  If the helper leaves ZF clear, the (updated)
       EDX:EAX pair is written back to the registers. */
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Gather the EDX:EAX comparand and ECX:EBX replacement pairs. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7411
7412
/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    /* CMPXCHG16B Mdq - 128-bit compare/exchange using RDX:RAX and RCX:RBX.
       Requires CPUID CX16 in the guest (else #UD) and a 16-byte aligned
       effective address (else #GP(0)).  On hosts with native cmpxchg16b the
       assembly helper is used; otherwise a fallback path is taken (see the
       note below on its atomicity limits). */
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
#if 0
        RT_NOREF(bRm);
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#else
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        /* Gather the RDX:RAX comparand and RCX:RBX replacement pairs. */
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);
# ifdef RT_ARCH_AMD64
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
        else
# endif
        {
            /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
               accesses and not all atomic, which works fine in a UNI CPU guest
               configuration (ignoring DMA). If guest SMP is active we have no choice
               but to use a rendezvous callback here. Sigh. */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
            {
                IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
            }
        }

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();

        IEM_MC_END();
        return VINF_SUCCESS;
#endif
    }
    Log(("cmpxchg16b -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
7486
7487
/* Grp9 /6 and /7 encodings (rdrand and the VMX pointer instructions):
   currently stubbed to raise invalid opcode (#UD) until implemented. */

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7502
7503
/** Opcode 0x0f 0xc7. */
FNIEMOP_DEF(iemOp_Grp9)
{
    /* Group 9 dispatcher: routes on the modrm reg field.
       /1 = cmpxchg8b/16b (memory only), /6 = rdrand or vmptrld/vmclear/vmxon
       depending on mod and prefixes, /7 = vmptrst; the rest is #UD. */
    /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 2: case 3: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 1:
            /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
            /* Register form and operand-size/repz prefixes are invalid;
               REX.W selects the 16-byte variant. */
            if (   (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
                || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
                return IEMOP_RAISE_INVALID_OPCODE();
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
            return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
        case 6:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
            {
                case 0:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
                case IEM_OP_PRF_SIZE_OP:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
                case IEM_OP_PRF_REPZ:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
                default:
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
            {
                case 0:
                case IEM_OP_PRF_REPZ:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
                default:
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7547
7548
/**
 * Common 'bswap register' helper.
 *
 * Byte-swaps the general register @a iReg according to the effective operand
 * size.  The 16-bit form is handled by a dedicated u16 assembly helper
 * operating on a 32-bit register reference (16-bit BSWAP results are
 * documented as undefined on real CPUs — hence the separate helper).
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7588
7589
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    /* BSWAP rAX/r8 (486+). */
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
7600
7601
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    /* BSWAP rCX/r9 (486+). */
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
7609
7610
7611/** Opcode 0x0f 0xca. */
7612FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7613{
7614 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r9");
7615 IEMOP_HLP_MIN_486();
7616 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7617}
7618
7619
7620/** Opcode 0x0f 0xcb. */
7621FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7622{
7623 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r9");
7624 IEMOP_HLP_MIN_486();
7625 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7626}
7627
7628
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    /* BSWAP rSP/r12 (486+). */
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
7636
7637
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    /* BSWAP rBP/r13 (486+). */
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
7645
7646
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    /* BSWAP rSI/r14 (486+). */
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
7654
7655
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    /* BSWAP rDI/r15 (486+). */
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
7663
7664
/* 0x0f 0xd0 - 0x0f 0xd6 media instructions: all stubs below, not yet
   implemented. */

/* Opcode 0x0f 0xd0 - invalid */
/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0xd0 - invalid */
/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);

/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xd1 - invalid */
/* Opcode 0xf2 0x0f 0xd1 - invalid */

/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd2 - invalid */
/* Opcode 0xf2 0x0f 0xd2 - invalid */

/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd3 - invalid */
/* Opcode 0xf2 0x0f 0xd3 - invalid */

/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xd4 - invalid */
/* Opcode 0xf2 0x0f 0xd4 - invalid */

/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd5 - invalid */
/* Opcode 0xf2 0x0f 0xd5 - invalid */

/* Opcode 0x0f 0xd6 - invalid */
/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
#if 0
/* NOTE(review): disabled draft for the 0x0f 0xd6 encodings (movq/movq2dq/movdq2q).
   The case bodies below appear to be copied from the pmovmskb worker as a
   template and do not implement the actual instructions yet.  Fixed a garbled
   IEMOP_MNEMONIC token (had spaces inserted between every character). */
FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
{
    /* Docs says register only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
            IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pDst, 0);
            IEM_MC_ARG(uint128_t const *, pSrc, 1);
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
            IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pDst, 0);
            IEM_MC_ARG(uint64_t const *, pSrc, 1);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
            IEM_MC_PREPARE_FPU_USAGE();
            IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
            IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
#endif
7757
7758
7759/** Opcode 0x0f 0xd7. */
7760FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7761{
7762 /* Docs says register only. */
7763 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7764 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7765 return IEMOP_RAISE_INVALID_OPCODE();
7766
7767 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
7768 /** @todo testcase: Check that the instruction implicitly clears the high
7769 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7770 * and opcode modifications are made to work with the whole width (not
7771 * just 128). */
7772 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7773 {
7774 case IEM_OP_PRF_SIZE_OP: /* SSE */
7775 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7776 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7777 IEM_MC_BEGIN(2, 0);
7778 IEM_MC_ARG(uint64_t *, pDst, 0);
7779 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7780 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7781 IEM_MC_PREPARE_SSE_USAGE();
7782 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7783 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7784 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7785 IEM_MC_ADVANCE_RIP();
7786 IEM_MC_END();
7787 return VINF_SUCCESS;
7788
7789 case 0: /* MMX */
7790 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7791 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7792 IEM_MC_BEGIN(2, 0);
7793 IEM_MC_ARG(uint64_t *, pDst, 0);
7794 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7795 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7796 IEM_MC_PREPARE_FPU_USAGE();
7797 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7798 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7799 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7800 IEM_MC_ADVANCE_RIP();
7801 IEM_MC_END();
7802 return VINF_SUCCESS;
7803
7804 default:
7805 return IEMOP_RAISE_INVALID_OPCODE();
7806 }
7807}
7808
7809
/*
 * Opcodes 0x0f 0xd8 thru 0xe6 - packed-integer saturating arithmetic, min/max,
 * logical ops and conversions.
 * NOTE(review): all of these are unimplemented FNIEMOP_STUB placeholders.
 */
/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_STUB(iemOp_pand_Pq_Qq);
/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */

/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */

/* Opcode 0x0f 0xe6 - invalid */
/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7915
7916
7917/** Opcode 0x0f 0xe7. */
7918FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7919{
7920 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7921 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7922 {
7923 /*
7924 * Register, memory.
7925 */
7926/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
7927 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7928 {
7929
7930 case IEM_OP_PRF_SIZE_OP: /* SSE */
7931 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7932 IEM_MC_BEGIN(0, 2);
7933 IEM_MC_LOCAL(uint128_t, uSrc);
7934 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7935
7936 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7938 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7939 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7940
7941 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7942 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7943
7944 IEM_MC_ADVANCE_RIP();
7945 IEM_MC_END();
7946 break;
7947
7948 case 0: /* MMX */
7949 IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7950 IEM_MC_BEGIN(0, 2);
7951 IEM_MC_LOCAL(uint64_t, uSrc);
7952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7953
7954 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7956 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7957 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7958
7959 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7960 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7961
7962 IEM_MC_ADVANCE_RIP();
7963 IEM_MC_END();
7964 break;
7965
7966 default:
7967 return IEMOP_RAISE_INVALID_OPCODE();
7968 }
7969 }
7970 /* The register, register encoding is invalid. */
7971 else
7972 return IEMOP_RAISE_INVALID_OPCODE();
7973 return VINF_SUCCESS;
7974}
7975
7976
/*
 * Opcodes 0x0f 0xe8 thru 0xee - packed signed-saturating add/sub, min/max, por.
 * NOTE(review): all of these are unimplemented FNIEMOP_STUB placeholders.
 */
/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */

/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */

/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_STUB(iemOp_por_Pq_Qq);
/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */

/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */
8025
8026
/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC(pxor, "pxor");
    /* Delegate to the common MMX full-register binary-op worker with the
       pxor arithmetic implementation. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
}
8033
/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpxor, "vpxor");
    /* Delegate to the common SSE2 full-register binary-op worker, reusing the
       same pxor arithmetic implementation as the MMX form. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
8040
/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/*
 * Opcodes 0x0f 0xf0 thru 0xfe - lddqu, packed shifts-left, multiplies,
 * sum-of-absolute-differences, masked moves and wrap-around add/sub.
 * NOTE(review): all of these are unimplemented FNIEMOP_STUB placeholders.
 */
/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xfe - invalid */
8132
8133
/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        /* On Intel, consume a ModR/M byte and its effective-address bytes
           before raising \#UD, so the reported instruction length matches.
           NOTE(review): presumably this models Intel's documented UD0
           ModR/M consumption - confirm against the SDM. */
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        /* Only decode the effective address (no memory access); skipped in
           the MC test build where the helper is unavailable. */
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    /* Always raises an invalid-opcode exception; the above only affects
       how many bytes are decoded first. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
8151
8152
8153
/** Repeats a_fn four times, once per prefix column (none, 0x66, 0xf3, 0xf2).
 *  For decoding tables. */
#define IEMOP_X4(a_fn) a_fn, a_fn, a_fn, a_fn
8156
8157IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8158{
8159 /* no prefix, 066h prefix f3h prefix, f2h prefix */
8160 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8161 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8162 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8163 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8164 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8165 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8166 /* 0x06 */ IEMOP_X4(iemOp_clts),
8167 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8168 /* 0x08 */ IEMOP_X4(iemOp_invd),
8169 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8170 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8171 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8172 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8173 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8174 /* 0x0e */ IEMOP_X4(iemOp_femms),
8175 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
8176
8177 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
8178 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
8179 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
8180 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8181 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8182 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8183 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
8184 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8185 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
8186 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
8187 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
8188 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
8189 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
8190 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
8191 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
8192 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
8193
8194 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
8195 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
8196 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
8197 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8198 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8199 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8200 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8201 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8202 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8203 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM,
8204 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
8205 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8206 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
8207 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
8208 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8209 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8210
8211 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
8212 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
8213 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
8214 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
8215 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
8216 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
8217 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
8218 /* 0x37 */ IEMOP_X4(iemOp_getsec),
8219 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
8220 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8221 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
8222 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8223 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8224 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8225 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8226 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8227
8228 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
8229 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
8230 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
8231 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
8232 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
8233 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
8234 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
8235 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
8236 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
8237 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
8238 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
8239 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
8240 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
8241 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
8242 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
8243 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
8244
8245 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8246 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8247 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8248 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8249 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8250 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8251 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8252 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8253 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8254 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8255 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8256 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8257 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8258 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8259 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8260 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8261
8262 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8263 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8264 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8265 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8266 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8267 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8268 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8269 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8270 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8271 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8272 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8273 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8274 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8275 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8276 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8277 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
8278
8279 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
8280 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8281 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8282 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8283 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8284 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8285 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8286 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8287
8288 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8289 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8290 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8291 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8292 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8293 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8294 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
8295 /* 0x7f */ IEMOP_X4(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq),
8296
8297 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
8298 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
8299 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
8300 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
8301 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
8302 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
8303 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
8304 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
8305 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
8306 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
8307 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
8308 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
8309 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
8310 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
8311 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
8312 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
8313
8314 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
8315 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
8316 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
8317 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
8318 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
8319 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
8320 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
8321 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
8322 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
8323 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
8324 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
8325 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
8326 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
8327 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
8328 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
8329 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
8330
8331 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
8332 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
8333 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
8334 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
8335 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
8336 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
8337 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8338 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8339 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
8340 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
8341 /* 0xaa */ IEMOP_X4(iemOp_rsm),
8342 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
8343 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
8344 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
8345 /* 0xae */ IEMOP_X4(iemOp_Grp15),
8346 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
8347
8348 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
8349 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
8350 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
8351 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
8352 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
8353 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
8354 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
8355 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
8356 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
8357 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
8358 /* 0xba */ IEMOP_X4(iemOp_Grp8),
8359 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
8360 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
8361 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
8362 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
8363 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
8364
8365 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
8366 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
8367 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8368 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8369 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8370 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8371 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
8372 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
8373 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
8374 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
8375 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
8376 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
8377 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
8378 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
8379 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
8380 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
8381
8382 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8383 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8384 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8385 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8386 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8387 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8388 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
8389 /* 0xd7 */ IEMOP_X4(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq),
8390 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8391 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8392 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8393 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8394 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8395 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8396 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8397 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8398
8399 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8400 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8401 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8402 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8403 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8404 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8405 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8406 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8407 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8408 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8409 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8410 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8411 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8412 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8413 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8414 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8415
8416 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8417 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8418 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8419 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8420 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8421 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8422 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8423 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8424 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8425 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8426 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8427 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8428 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8429 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8430 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8431 /* 0xff */ IEMOP_X4(iemOp_ud0),
8432};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024); /* 256 opcodes x 4 prefix variants per opcode. */
8434/** @} */
8435
8436
8437/** @name One byte opcodes.
8438 *
8439 * @{
8440 */
8441
/** Opcode 0x00 - add Eb,Gb (byte register/memory destination). */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01 - add Ev,Gv (word/dword/qword register/memory destination). */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02 - add Gb,Eb (byte register destination). */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03 - add Gv,Ev (word/dword/qword register destination). */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04 - add al,Ib (immediate byte to AL). */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05 - add rAX,Iz (word/dword immediate to rAX). */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
8488
8489
/** Opcode 0x06 - push es. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC(push_es, "push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07 - pop es; invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC(pop_es, "pop es");
    /* NOTE(review): the 64-bit check is done before decode completion here, but
       after it in iemOp_pop_SS/iemOp_pop_DS -- confirm the intended ordering. */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
8506
8507
/** Opcode 0x08 - or Eb,Gb. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}


/** Opcode 0x09 - or Ev,Gv. */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}


/** Opcode 0x0a - or Gb,Eb. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b - or Gv,Ev. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c - or al,Ib. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d - or rAX,Iz. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
8560
8561
/** Opcode 0x0e - push cs. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC(push_cs, "push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
8568
8569
/** Opcode 0x0f - escape byte into the two-byte opcode map. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#ifdef VBOX_STRICT
    /* One-time strict-build sanity check of the 4-entries-per-opcode table
       layout the dispatch below relies on, using 0x0f 0xbc as a probe
       (bsf, with tzcnt at prefix index 2 -- presumably the F3 slot; see table). */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);

    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    /* Dispatch on the second opcode byte; idxPrefix selects one of the four
       per-opcode prefix variants in the table. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
8592
/** Opcode 0x10 - adc Eb,Gb (add with carry). */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11 - adc Ev,Gv. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12 - adc Gb,Eb. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13 - adc Gv,Ev. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14 - adc al,Ib. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15 - adc rAX,Iz. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
8639
8640
/** Opcode 0x16 - push ss. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC(push_ss, "push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17 - pop ss; invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
8657
8658
/** Opcode 0x18 - sbb Eb,Gb (subtract with borrow). */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19 - sbb Ev,Gv. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a - sbb Gb,Eb. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b - sbb Gv,Ev. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c - sbb al,Ib. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d - sbb rAX,Iz. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
8705
8706
/** Opcode 0x1e - push ds. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC(push_ds, "push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f - pop ds; invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC(pop_ds, "pop ds");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
8723
8724
/** Opcode 0x20 - and Eb,Gb. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21 - and Ev,Gv. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22 - and Gb,Eb. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23 - and Gv,Ev. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24 - and al,Ib. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25 - and rAX,Iz. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
8777
8778
/** Opcode 0x26 - ES segment override prefix. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    /* Continue decoding at the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x27 - daa (decimal adjust AL after addition); invalid in 64-bit
 *  mode; OF is undefined. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC(daa_AL, "daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
8800
8801
/** Opcode 0x28 - sub Eb,Gb. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29 - sub Ev,Gv. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a - sub Gb,Eb. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b - sub Gv,Ev. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c - sub al,Ib. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d - sub rAX,Iz. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
8848
8849
/** Opcode 0x2e - CS segment override prefix. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    /* Continue decoding at the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x2f - das (decimal adjust AL after subtraction); invalid in 64-bit
 *  mode; OF is undefined. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC(das_AL, "das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
8871
8872
/** Opcode 0x30 - xor Eb,Gb. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31 - xor Ev,Gv. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32 - xor Gb,Eb. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33 - xor Gv,Ev. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34 - xor al,Ib. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35 - xor rAX,Iz. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
8925
8926
/** Opcode 0x36 - SS segment override prefix. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    /* Continue decoding at the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x37 - aaa. Not implemented yet (stub raises assertion/status). */
FNIEMOP_STUB(iemOp_aaa);
8941
8942
/** Opcode 0x38 - cmp Eb,Gb (compare; flags only, no destination write). */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39 - cmp Ev,Gv. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a - cmp Gb,Eb. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b - cmp Gv,Ev. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c - cmp al,Ib. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d - cmp rAX,Iz. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
8989
8990
/** Opcode 0x3e - DS segment override prefix. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    /* Continue decoding at the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x3f - aas. Not implemented yet (stub raises assertion/status). */
FNIEMOP_STUB(iemOp_aas);
9005
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * @param   pImpl   Table with the operand-size specific worker functions.
 * @param   iReg    The general purpose register to operate on (including any
 *                  REX extension already applied by the caller).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit GPR writes clear the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reached; keeps the compiler happy about the missing default case. */
    return VINF_SUCCESS;
}
9050
9051
/** Opcode 0x40 - inc eAX, or the plain REX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41 - inc eCX, or the REX.B prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42 - inc eDX, or the REX.X prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43 - inc eBX, or the REX.BX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44 - inc eSP, or the REX.R prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45 - inc eBP, or the REX.RB prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46 - inc eSI, or the REX.RX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47 - inc eDI, or the REX.RBX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
9223
9224
/** Opcode 0x48 - dec eAX, or the REX.W prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.  REX.W changes the effective
     * operand size, hence the iemRecalEffOpSize call.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49 - dec eCX, or the REX.BW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a - dec eDX, or the REX.XW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b - dec eBX, or the REX.BXW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}


/** Opcode 0x4c - dec eSP, or the REX.RW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d - dec eBP, or the REX.RBW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e - dec eSI, or the REX.RXW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f - dec eDI, or the REX.RBXW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
9403
9404
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode REX.B is applied to the register number and the default
 * operand size is forced to 64-bit (a 66h prefix yields 16-bit; there is no
 * 32-bit push in 64-bit mode).
 *
 * @param   iReg    The general purpose register to push (before REX.B).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
9450
9451
/** Opcode 0x50 - push rAX. */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/** Opcode 0x51 - push rCX. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/** Opcode 0x52 - push rDX. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/** Opcode 0x53 - push rBX. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
9482
9483
/** Opcode 0x54. */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    /* 8086/8088 quirk: PUSH SP stores the value of SP *after* it has been
       decremented (i.e. SP - 2), unlike 186 and later CPUs which store the
       original value.  Emulate that by pushing a pre-decremented copy. */
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* NOTE(review): the MC block above appears to return on its own (see the
       MSVC 4702 unreachable-code pragma at the top of the file), so this call
       should only be reached for non-8086 targets — confirm against IEMMc.h. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
9500
9501
/** Opcode 0x55. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* Common worker handles all operand sizes; REX.B (-> r13) is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
9508
9509
/** Opcode 0x56. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* Common worker handles all operand sizes; REX.B (-> r14) is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
9516
9517
/** Opcode 0x57. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* Common worker handles all operand sizes; REX.B (-> r15) is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
9524
9525
/**
 * Common 'pop register' helper.
 *
 * In 64-bit mode REX.B extends the register index and the default operand
 * size becomes 64-bit; the 66h prefix selects 16-bit (there is no 32-bit
 * POP reg in long mode).
 *
 * @param   iReg    The general register being popped into (X86_GREG_XXX);
 *                  REX.B is OR'ed in below for 64-bit mode.
 *
 * @note    iemOp_pop_eSP does not use this helper for the non-REX.B case
 *          since popping into the stack pointer itself needs special care.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            /* Writing through the 32-bit reference does not zero the upper
               half, hence the explicit clear. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
9572
9573
/** Opcode 0x58. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* Common worker handles all operand sizes; REX.B (-> r8) is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
9580
9581
/** Opcode 0x59. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* Common worker handles all operand sizes; REX.B (-> r9) is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
9588
9589
/** Opcode 0x5a. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* Common worker handles all operand sizes; REX.B (-> r10) is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
9596
9597
/** Opcode 0x5b. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* Common worker handles all operand sizes; REX.B (-> r11) is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
9604
9605
/** Opcode 0x5c.
 *
 * POP into the stack pointer itself needs dedicated handling: the popped
 * value must replace SP/ESP/RSP *after* the increment implied by the pop,
 * so the common register-reference pattern of iemOpCommonPopGReg cannot
 * be used.  Here the value is popped into a local first and then stored. */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        if (pVCpu->iem.s.uRexB)
            /* With REX.B this is 'pop r12', not the stack pointer, so the
               common worker can handle it. */
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
9653
9654
/** Opcode 0x5d. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* Common worker handles all operand sizes; REX.B (-> r13) is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
9661
9662
/** Opcode 0x5e. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* Common worker handles all operand sizes; REX.B (-> r14) is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
9669
9670
/** Opcode 0x5f. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* Common worker handles all operand sizes; REX.B (-> r15) is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
9677
9678
/** Opcode 0x60 - pusha/pushad.
 *  Invalid in 64-bit mode (the 0x60 byte is reused there); 186+ only.
 *  Defers to a C implementation selected by effective operand size. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    /* Only 16 and 32-bit operand sizes are possible outside 64-bit mode. */
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
9690
9691
/** Opcode 0x61 - popa/popad.
 *  Invalid in 64-bit mode; 186+ only.
 *  Defers to a C implementation selected by effective operand size. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC(popa, "popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    /* Only 16 and 32-bit operand sizes are possible outside 64-bit mode. */
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
9703
9704
/** Opcode 0x62 - bound (186+, non-64-bit) / EVEX prefix byte.
 *  Not implemented yet; the stub raises an invalid-opcode style failure. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
// IEMOP_HLP_MIN_186();
9708
9709
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw - adjust RPL of the selector in Ew to be no less privileged
 * than Gw's.  286+ and protected mode only (real/V86 mode raises \#UD).
 * In 64-bit mode the 0x63 byte is movsxd instead (see iemOp_movsxd_Gv_Ev). */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        /* No REX extension of the indexes: this opcode only exists outside
           64-bit mode. */
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory: read-modify-write on the mapped destination word. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
9759
9760
/** Opcode 0x63 - 64-bit mode: movsxd Gv,Ev (sign-extend dword to qword).
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        /* Fetch the 32-bit source sign-extended to 64-bit, then store the
           full qword into the destination (reg field, REX.R extended). */
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9802
9803
/** Opcode 0x64 - FS segment override prefix (386+).
 *  Records the prefix and recursively decodes the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg   = X86_SREG_FS;

    /* Continue decoding with the next byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9816
9817
/** Opcode 0x65 - GS segment override prefix (386+).
 *  Records the prefix and recursively decodes the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg   = X86_SREG_GS;

    /* Continue decoding with the next byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9830
9831
/** Opcode 0x66 - operand-size override prefix (386+).
 *  Records the prefix, recalculates the effective operand size, and
 *  recursively decodes the next opcode byte. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    /* Continue decoding with the next byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9849
9850
/** Opcode 0x67 - address-size override prefix (386+).
 *  Toggles the effective address mode relative to the default one
 *  (16 <-> 32 outside long mode, 64 -> 32 in long mode) and recursively
 *  decodes the next opcode byte. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    /* Continue decoding with the next byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9869
9870
/** Opcode 0x68 - push Iz (186+).
 *  Pushes an immediate sized by the effective operand size; in 64-bit mode
 *  the immediate is 32-bit and sign-extended to 64-bit. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* 32-bit immediate, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9915
9916
/** Opcode 0x69 - imul Gv,Ev,Iz (186+).
 *
 * Three-operand signed multiply: Gv = Ev * Iz.  The multiplication is done
 * on a local copy and the result is then stored into the ModR/M reg-field
 * register.  SF/ZF/AF/PF are undefined afterwards (only CF/OF are defined).
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                /* Multiply into a local copy, then commit to the destination. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* The 2 = number of immediate bytes still to be fetched
                   (relevant to RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* 4 = size of the trailing immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; the immediate is 32-bit sign-extended. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* 4 = size of the trailing (sign-extended) immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
10076
10077
/** Opcode 0x6a - push Ib (186+).
 *  Pushes a sign-extended byte immediate using the effective operand size. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        /* The int8_t is sign-extended implicitly by the push macros. */
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10104
10105
/** Opcode 0x6b - imul Gv,Ev,Ib (186+).
 *
 * Three-operand signed multiply with a sign-extended byte immediate.
 * Mirrors iemOp_imul_Gv_Ev_Iz except for the immediate size.
 * SF/ZF/AF/PF are undefined afterwards (only CF/OF are defined).
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; byte immediate sign-extended to 16 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                /* Multiply into a local copy, then commit to the destination. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG(uint16_t,        u16Src,                     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* 1 = size of the trailing byte immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; byte immediate sign-extended to 32 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG(uint32_t,        u32Src,                     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* 1 = size of the trailing byte immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; byte immediate sign-extended to 64 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG(uint64_t,        u64Src,                     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* 1 = size of the trailing byte immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
10259
10260
/** Opcode 0x6c - ins Yb,DX (186+).
 *
 * Dispatches to a C implementation selected by REP prefix and effective
 * address size.  REPNZ is treated the same as REPZ (both bits checked).
 * The 'false' argument presumably indicates the I/O permission check has
 * not been done yet — TODO confirm against the iemCImpl signatures. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10289
10290
/** Opcode 0x6d - ins Yv,DX (186+).
 *
 * Word/dword variant; dispatches on REP prefix, effective operand size and
 * effective address size.  A 64-bit operand size falls back to the 32-bit
 * operand workers (there is no 64-bit I/O port access). */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit uses the 32-bit operand workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default case of the op-size switch */
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit uses the 32-bit operand workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default case of the op-size switch */
        }
    }
}
10351
10352
/** Opcode 0x6e - outs DX,Yb (186+).
 *
 * Dispatches to a C implementation by REP prefix and effective address
 * size, passing the effective (possibly overridden) source segment. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10381
10382
/** Opcode 0x6f - outs DX,Yv (186+).
 *
 * Word/dword variant; dispatches on REP prefix, effective operand size and
 * effective address size.  A 64-bit operand size falls back to the 32-bit
 * operand workers (there is no 64-bit I/O port access). */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit uses the 32-bit operand workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default case of the op-size switch */
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit uses the 32-bit operand workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default case of the op-size switch */
        }
    }
}
10443
10444
/** Opcode 0x70 - jo Jb: jump short if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10462
10463
/** Opcode 0x71 - jno Jb: jump short if not overflow (OF=0).
 *  Same test as jo with the taken/not-taken arms swapped. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10481
/** Opcode 0x72 - jc/jb/jnae Jb (jump short if carry, i.e. CF=1). */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10499
10500
/** Opcode 0x73 - jnc/jnb/jae Jb (jump short if no carry, i.e. CF=0). */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: advance when CF is set, branch otherwise. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10518
10519
/** Opcode 0x74 - je/jz Jb (jump short if equal/zero, i.e. ZF=1). */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10537
10538
/** Opcode 0x75 - jne/jnz Jb (jump short if not equal/not zero, i.e. ZF=0). */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: advance when ZF is set, branch otherwise. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10556
10557
/** Opcode 0x76 - jbe/jna Jb (jump short if below or equal, i.e. CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10575
10576
/** Opcode 0x77 - ja/jnbe Jb (jump short if above, i.e. CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: advance when CF or ZF is set, branch otherwise. */
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10594
10595
/** Opcode 0x78 - js Jb (jump short if sign, i.e. SF=1). */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10613
10614
/** Opcode 0x79 - jns Jb (jump short if not sign, i.e. SF=0). */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: advance when SF is set, branch otherwise. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10632
10633
/** Opcode 0x7a - jp/jpe Jb (jump short if parity even, i.e. PF=1). */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10651
10652
/** Opcode 0x7b - jnp/jpo Jb (jump short if parity odd, i.e. PF=0). */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: advance when PF is set, branch otherwise. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10670
10671
/** Opcode 0x7c - jl/jnge Jb (jump short if less, signed, i.e. SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10689
10690
/** Opcode 0x7d - jnl/jge Jb (jump short if greater or equal, signed, i.e. SF == OF). */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: advance when SF != OF, branch otherwise. */
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10708
10709
/** Opcode 0x7e - jle/jng Jb (jump short if less or equal, signed, i.e. ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10727
10728
/** Opcode 0x7f - jnle/jg Jb (jump short if greater, signed, i.e. ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: advance when ZF=1 or SF != OF, branch otherwise. */
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10746
10747
/**
 * Opcode 0x80 - Group 1: add/or/adc/sbb/and/sub/xor/cmp Eb,Ib.
 *
 * The ModR/M reg field selects the operation; the implementation function
 * table g_apIemImplGrp1 is indexed by the same value.  CMP (/7) has no
 * locked variant (pfnLockedU8 is NULL), which is also used below to pick
 * read-only vs. read-write memory access.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is invalid with a register destination */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;    /* CMP only reads the destination */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Effective address first (1 byte of immediate still pending), then the immediate. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();                  /* LOCK prefix allowed for RMW ops */
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* ... but not for CMP */

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10817
10818
/**
 * Opcode 0x81 - Group 1: add/or/adc/sbb/and/sub/xor/cmp Ev,Iz.
 *
 * Like opcode 0x80 but with word/dword/qword operands; the immediate is
 * 16 or 32 bits depending on operand size, and in 64-bit mode the imm32
 * is sign-extended to 64 bits.  CMP (/7) has no locked variant, which
 * also selects read-only memory access below.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first (2 immediate bytes still pending), then the immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first (4 immediate bytes still pending), then the immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first (4 immediate bytes still pending), then the sign-extended imm32. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
11006
11007
/** Opcode 0x82 - alias of opcode 0x80 (Grp1 Eb,Ib); invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
11014
11015
/**
 * Opcode 0x83 - Group 1: add/or/adc/sbb/and/sub/xor/cmp Ev,Ib.
 *
 * The 8-bit immediate is sign-extended to the effective operand size.
 * CMP (/7) has no locked variant, which also selects read-only memory
 * access below.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* The (int8_t) cast sign-extends the immediate to the operand size. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first (1 immediate byte still pending), then imm8 sign-extended. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
11198
11199
/** Opcode 0x84 - test Eb,Gb; dispatches to the common byte binary-op worker. */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
11207
11208
/** Opcode 0x85 - test Ev,Gv; dispatches to the common word/dword/qword binary-op worker. */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
11216
11217
/**
 * Opcode 0x86 - xchg Eb,Gb.
 *
 * Register form swaps the two byte registers via two fetches and two
 * stores.  Memory form maps the operand read-write and calls the
 * assembly xchg helper (on real CPUs XCHG with a memory operand has
 * implicit LOCK semantics per the Intel SDM).
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11265
11266
/**
 * Opcode 0x87 - xchg Ev,Gv.
 *
 * Word/dword/qword variant of opcode 0x86: register form swaps via
 * temporaries, memory form maps the operand read-write and calls the
 * assembly xchg helper for the operand size.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                /* Note: the 32-bit STORE_GREG macros zero the upper halves implicitly. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The register was written through a reference, so clear the upper half here. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11388
11389
/**
 * Opcode 0x88 - mov Eb,Gb.
 *
 * Copies the byte register selected by reg to the register or memory
 * operand selected by mod/rm.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
11429
11430
/**
 * @opcode 0x89 - mov Ev,Gv.
 *
 * Stores a general purpose register (selected by the ModR/M reg field plus
 * REX.R) into a register or memory destination, honouring the effective
 * operand size (16/32/64-bit).  MOV does not accept a LOCK prefix.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* register form: decoding complete, #UD on LOCK */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         * Note: effective address is calculated before the decoding-done check
         *       so displacement bytes are consumed first.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
11520
11521
/**
 * @opcode 0x8a - mov Gb,Eb.
 *
 * Loads a byte register (ModR/M reg field plus REX.R) from a byte register
 * or memory source.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         * (GCPtrEffDst is really a source address here; name kept for template symmetry.)
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11559
11560
/**
 * @opcode 0x8b - mov Gv,Ev.
 *
 * Loads a general purpose register (ModR/M reg field plus REX.R) from a
 * register or memory source, honouring the effective operand size
 * (16/32/64-bit).  Also reached via opcode 0x63 (movsxd) without REX.W,
 * see iemOp_arpl_Ew_Gw_movsx_Gv_Ev.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
11650
11651
/**
 * @opcode 0x63 - ARPL Ew,Gw (legacy/compat modes) or MOVSXD Gv,Ev (64-bit mode).
 *
 * In 64-bit mode without a 64-bit effective operand size (no REX.W) the
 * instruction degenerates to a plain mov Gv,Ev at the current operand size,
 * so it is dispatched to that handler.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
11661
11662
/**
 * @opcode 0x8c - mov Ev,Sw.
 *
 * Stores a segment register into a general purpose register or a word sized
 * memory location.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the source segment register exists (reg field values above
     * GS raise #UD).  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero extended, see comment above */
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11736
11737
11738
11739
/**
 * @opcode 0x8d - lea Gv,M.
 *
 * Loads the effective address of the memory operand into a general purpose
 * register, truncating it to the effective operand size.  The register form
 * (mod == 3) is invalid and raises \#UD.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate the address to 16 bits */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate the address to 32 bits */
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7); /* enmEffOpSize is always one of the three modes above */
}
11786
11787
/**
 * @opcode 0x8e - mov Sw,Ev.
 *
 * Loads a segment register from a general purpose register or word sized
 * memory location.  Loading CS this way is invalid (\#UD); the actual segment
 * loading (descriptor checks, \#GP/\#SS, etc.) is deferred to
 * iemCImpl_load_SReg.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11842
11843
/**
 * @opcode 0x8f /0 - pop Ev.
 *
 * Pops a value from the stack into a register or memory operand.  The memory
 * form is implemented interpreter-style (outside the IEM_MC DSL) because RSP
 * must be incremented before the effective address is calculated.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP: the last argument is the
       operand size, i.e. the amount rSP is biased by before the calculation. */
/** @todo testcase */
    PCPUMCTX        pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Work on a copy of RSP so nothing is committed should the store fault. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit RSP and advance RIP only on full success. */
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
11938
11939
/**
 * @opcode 0x8f - group 1A.
 *
 * Only /0 (pop Ev) is defined; /1 thru /7 are reserved by AMD as the
 * three-byte XOP prefix (not implemented here, raises \#UD).
 */
FNIEMOP_DEF(iemOp_Grp1A)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
    /** @todo XOP decoding. */
    IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
    return IEMOP_RAISE_INVALID_OPCODE();
}
11952
11953
11954/**
11955 * Common 'xchg reg,rAX' helper.
11956 */
11957FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
11958{
11959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11960
11961 iReg |= pVCpu->iem.s.uRexB;
11962 switch (pVCpu->iem.s.enmEffOpSize)
11963 {
11964 case IEMMODE_16BIT:
11965 IEM_MC_BEGIN(0, 2);
11966 IEM_MC_LOCAL(uint16_t, u16Tmp1);
11967 IEM_MC_LOCAL(uint16_t, u16Tmp2);
11968 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
11969 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
11970 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
11971 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
11972 IEM_MC_ADVANCE_RIP();
11973 IEM_MC_END();
11974 return VINF_SUCCESS;
11975
11976 case IEMMODE_32BIT:
11977 IEM_MC_BEGIN(0, 2);
11978 IEM_MC_LOCAL(uint32_t, u32Tmp1);
11979 IEM_MC_LOCAL(uint32_t, u32Tmp2);
11980 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
11981 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
11982 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
11983 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
11984 IEM_MC_ADVANCE_RIP();
11985 IEM_MC_END();
11986 return VINF_SUCCESS;
11987
11988 case IEMMODE_64BIT:
11989 IEM_MC_BEGIN(0, 2);
11990 IEM_MC_LOCAL(uint64_t, u64Tmp1);
11991 IEM_MC_LOCAL(uint64_t, u64Tmp2);
11992 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
11993 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
11994 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
11995 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
11996 IEM_MC_ADVANCE_RIP();
11997 IEM_MC_END();
11998 return VINF_SUCCESS;
11999
12000 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12001 }
12002}
12003
12004
12005/** Opcode 0x90. */
12006FNIEMOP_DEF(iemOp_nop)
12007{
12008 /* R8/R8D and RAX/EAX can be exchanged. */
12009 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
12010 {
12011 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
12012 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
12013 }
12014
12015 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
12016 IEMOP_MNEMONIC(pause, "pause");
12017 else
12018 IEMOP_MNEMONIC(nop, "nop");
12019 IEM_MC_BEGIN(0, 0);
12020 IEM_MC_ADVANCE_RIP();
12021 IEM_MC_END();
12022 return VINF_SUCCESS;
12023}
12024
12025
/** @opcode 0x91 - xchg rCX,rAX (rCX covers cx/ecx/rcx/r9 per prefixes). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
12032
12033
/** @opcode 0x92 - xchg rDX,rAX. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
12040
12041
/** @opcode 0x93 - xchg rBX,rAX. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
12048
12049
12050/** Opcode 0x94. */
12051FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
12052{
12053 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
12054 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
12055}
12056
12057
/** @opcode 0x95 - xchg rBP,rAX. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
12064
12065
/** @opcode 0x96 - xchg rSI,rAX. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
12072
12073
/** @opcode 0x97 - xchg rDI,rAX. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
12080
12081
/**
 * @opcode 0x98 - cbw / cwde / cdqe.
 *
 * Sign extends AL into AX, AX into EAX, or EAX into RAX, depending on the
 * effective operand size.  The sign extension is implemented by testing the
 * top bit of the narrower value and OR'ing / AND'ing the upper half.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) { /* sign bit of AL */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* sign bit of AX */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* sign bit of EAX */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12127
12128
/**
 * @opcode 0x99 - cwd / cdq / cqo.
 *
 * Sign extends the accumulator into rDX: DX:AX, EDX:EAX or RDX:RAX depending
 * on the effective operand size.  rDX is set to all-ones or zero based on the
 * sign bit of the accumulator.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* sign bit of AX */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* sign bit of EAX */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) { /* sign bit of RAX */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12174
12175
/**
 * @opcode 0x9a - call Ap (far call with immediate sel:offset).
 *
 * Invalid in 64-bit mode (\#UD).  The offset immediate is 16 or 32 bits
 * depending on the effective operand size; the 16-bit selector follows it.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
12192
12193
/**
 * @opcode 0x9b - wait (aka fwait).
 *
 * Checks for pending x87 exceptions (\#NM when CR0.MP+TS, \#MF on pending
 * unmasked FPU exceptions) and otherwise does nothing.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12207
12208
/**
 * @opcode 0x9c - pushf Fv.
 *
 * Defers to iemCImpl_pushf; operand size defaults to 64-bit in long mode.
 * NOTE(review): no IEMOP_MNEMONIC here, unlike the sibling handlers - looks
 * like an omission, but harmless (logging only); confirm before adding.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
12216
12217
/**
 * @opcode 0x9d - popf Fv.
 *
 * Defers to iemCImpl_popf (which handles IOPL/VM86/interrupt-flag rules);
 * operand size defaults to 64-bit in long mode.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
12225
12226
/**
 * @opcode 0x9e - sahf.
 *
 * Stores AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF; bit 1 forced
 * to 1).  In 64-bit mode this requires the LAHF/SAHF CPUID feature,
 * otherwise \#UD.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/); /* AH is encoded as SP in the legacy high-byte register set */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00)); /* keep the upper 24 bits of EFLAGS untouched */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);           /* bit 1 is always set */
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12249
12250
/**
 * @opcode 0x9f - lahf.
 *
 * Loads the low byte of EFLAGS into AH.  In 64-bit mode this requires the
 * LAHF/SAHF CPUID feature, otherwise \#UD.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags); /* AH is encoded as SP in the legacy high-byte register set */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12267
12268
12269/**
12270 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
12271 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
12272 * prefixes. Will return on failures.
12273 * @param a_GCPtrMemOff The variable to store the offset in.
12274 */
12275#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
12276 do \
12277 { \
12278 switch (pVCpu->iem.s.enmEffAddrMode) \
12279 { \
12280 case IEMMODE_16BIT: \
12281 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
12282 break; \
12283 case IEMMODE_32BIT: \
12284 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
12285 break; \
12286 case IEMMODE_64BIT: \
12287 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
12288 break; \
12289 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
12290 } \
12291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12292 } while (0)
12293
/**
 * @opcode 0xa0 - mov AL,Ob.
 *
 * Loads AL from the byte at the moffs immediate address (in the effective
 * segment).  NOTE(review): no IEMOP_MNEMONIC, unlike 0xa1 - presumably an
 * omission; logging only, confirm before adding.
 */
FNIEMOP_DEF(iemOp_mov_Al_Ob)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12314
12315
/**
 * @opcode 0xa1 - mov rAX,Ov.
 *
 * Loads AX/EAX/RAX from the moffs immediate address, according to the
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12361
12362
/**
 * @opcode 0xa2 - mov Ob,AL.
 *
 * Stores AL to the byte at the moffs immediate address (in the effective
 * segment).
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12383
12384
/**
 * @opcode 0xa3 - mov Ov,rAX.
 *
 * Stores AX/EAX/RAX to the moffs immediate address, according to the
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12429
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv: one MOVS step -
 * load from DS(or segment override):rSI, store to ES:rDI, then advance or
 * retreat both index registers by the element size according to EFLAGS.DF.
 * @param ValBits   Element width in bits (8/16/32/64).
 * @param AddrBits  Effective address width in bits (16/32/64). */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
12448
/**
 * @opcode 0xa4 - movsb Xb,Yb.
 *
 * Byte string move.  With a REP/REPNE prefix the whole loop is deferred to a
 * C implementation selected by the effective address size; otherwise a single
 * step is emitted via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12482
12483
12484/** Opcode 0xa5. */
12485FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
12486{
12487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12488
12489 /*
12490 * Use the C implementation if a repeat prefix is encountered.
12491 */
12492 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12493 {
12494 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
12495 switch (pVCpu->iem.s.enmEffOpSize)
12496 {
12497 case IEMMODE_16BIT:
12498 switch (pVCpu->iem.s.enmEffAddrMode)
12499 {
12500 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
12501 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
12502 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
12503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12504 }
12505 break;
12506 case IEMMODE_32BIT:
12507 switch (pVCpu->iem.s.enmEffAddrMode)
12508 {
12509 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
12510 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
12511 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
12512 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12513 }
12514 case IEMMODE_64BIT:
12515 switch (pVCpu->iem.s.enmEffAddrMode)
12516 {
12517 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
12518 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
12519 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
12520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12521 }
12522 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12523 }
12524 }
12525 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
12526
12527 /*
12528 * Annoying double switch here.
12529 * Using ugly macro for implementing the cases, sharing it with movsb.
12530 */
12531 switch (pVCpu->iem.s.enmEffOpSize)
12532 {
12533 case IEMMODE_16BIT:
12534 switch (pVCpu->iem.s.enmEffAddrMode)
12535 {
12536 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
12537 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
12538 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
12539 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12540 }
12541 break;
12542
12543 case IEMMODE_32BIT:
12544 switch (pVCpu->iem.s.enmEffAddrMode)
12545 {
12546 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
12547 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
12548 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
12549 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12550 }
12551 break;
12552
12553 case IEMMODE_64BIT:
12554 switch (pVCpu->iem.s.enmEffAddrMode)
12555 {
12556 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12557 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
12558 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
12559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12560 }
12561 break;
12562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12563 }
12564 return VINF_SUCCESS;
12565}
12566
12567#undef IEM_MOVS_CASE
12568
/**
 * Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits the non-repeating CMPS body for one operand-size/address-size
 * combination: compares the ValBits-bit value at [iEffSeg:xSI] with the one
 * at ES:[xDI] via iemAImpl_cmp_u<ValBits> (updating EFLAGS), then steps both
 * xSI and xDI by ValBits/8 in the direction dictated by EFLAGS.DF.
 *
 * Note: the stray trailing line continuation after IEM_MC_END() was removed;
 * it spliced the following blank line into the macro, unlike IEM_MOVS_CASE.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
12595
/**
 * Opcode 0xa6 - cmpsb Xb,Yb (byte string compare).
 *
 * Compares the byte at [iEffSeg:xSI] with the byte at ES:[xDI] (EFLAGS via
 * iemAImpl_cmp_u8) and steps both index registers per EFLAGS.DF.  Unlike
 * MOVS, REPZ and REPNE are distinct here and dispatch to separate C
 * implementations (repe vs repne).
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
12641
12642
12643/** Opcode 0xa7. */
12644FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
12645{
12646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12647
12648 /*
12649 * Use the C implementation if a repeat prefix is encountered.
12650 */
12651 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12652 {
12653 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
12654 switch (pVCpu->iem.s.enmEffOpSize)
12655 {
12656 case IEMMODE_16BIT:
12657 switch (pVCpu->iem.s.enmEffAddrMode)
12658 {
12659 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12660 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12661 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12663 }
12664 break;
12665 case IEMMODE_32BIT:
12666 switch (pVCpu->iem.s.enmEffAddrMode)
12667 {
12668 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12669 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12670 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12671 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12672 }
12673 case IEMMODE_64BIT:
12674 switch (pVCpu->iem.s.enmEffAddrMode)
12675 {
12676 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
12677 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12678 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12680 }
12681 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12682 }
12683 }
12684
12685 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12686 {
12687 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
12688 switch (pVCpu->iem.s.enmEffOpSize)
12689 {
12690 case IEMMODE_16BIT:
12691 switch (pVCpu->iem.s.enmEffAddrMode)
12692 {
12693 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12694 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12695 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12696 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12697 }
12698 break;
12699 case IEMMODE_32BIT:
12700 switch (pVCpu->iem.s.enmEffAddrMode)
12701 {
12702 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12703 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12704 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12705 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12706 }
12707 case IEMMODE_64BIT:
12708 switch (pVCpu->iem.s.enmEffAddrMode)
12709 {
12710 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
12711 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12712 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12713 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12714 }
12715 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12716 }
12717 }
12718
12719 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
12720
12721 /*
12722 * Annoying double switch here.
12723 * Using ugly macro for implementing the cases, sharing it with cmpsb.
12724 */
12725 switch (pVCpu->iem.s.enmEffOpSize)
12726 {
12727 case IEMMODE_16BIT:
12728 switch (pVCpu->iem.s.enmEffAddrMode)
12729 {
12730 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
12731 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
12732 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
12733 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12734 }
12735 break;
12736
12737 case IEMMODE_32BIT:
12738 switch (pVCpu->iem.s.enmEffAddrMode)
12739 {
12740 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
12741 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
12742 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
12743 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12744 }
12745 break;
12746
12747 case IEMMODE_64BIT:
12748 switch (pVCpu->iem.s.enmEffAddrMode)
12749 {
12750 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12751 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
12752 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
12753 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12754 }
12755 break;
12756 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12757 }
12758 return VINF_SUCCESS;
12759
12760}
12761
12762#undef IEM_CMPS_CASE
12763
/**
 * Opcode 0xa8 - test al,Ib.
 *
 * Dispatches to the common AL,Ib binary-operator worker with the TEST
 * implementation table.  AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
12771
12772
/**
 * Opcode 0xa9 - test rAX,Iz.
 *
 * Dispatches to the common rAX,Iz binary-operator worker with the TEST
 * implementation table.  AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
12780
12781
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the non-repeating STOS body for one operand-size/address-size
 * combination: stores the low ValBits bits of xAX to ES:[xDI], then steps
 * xDI by ValBits/8 in the direction dictated by EFLAGS.DF.
 *
 * Note: the stray trailing line continuation after IEM_MC_END() was removed;
 * it spliced the following blank line into the macro, unlike IEM_MOVS_CASE.
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
12797
/**
 * Opcode 0xaa - stosb Yb,AL (byte string store).
 *
 * Stores AL to ES:[xDI] and steps xDI per EFLAGS.DF.  REP-prefixed forms
 * (REPNZ and REPZ both act as plain REP) defer to iemCImpl_stos_al_*.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12831
12832
12833/** Opcode 0xab. */
12834FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
12835{
12836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12837
12838 /*
12839 * Use the C implementation if a repeat prefix is encountered.
12840 */
12841 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12842 {
12843 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
12844 switch (pVCpu->iem.s.enmEffOpSize)
12845 {
12846 case IEMMODE_16BIT:
12847 switch (pVCpu->iem.s.enmEffAddrMode)
12848 {
12849 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
12850 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
12851 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
12852 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12853 }
12854 break;
12855 case IEMMODE_32BIT:
12856 switch (pVCpu->iem.s.enmEffAddrMode)
12857 {
12858 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
12859 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
12860 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
12861 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12862 }
12863 case IEMMODE_64BIT:
12864 switch (pVCpu->iem.s.enmEffAddrMode)
12865 {
12866 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
12867 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
12868 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
12869 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12870 }
12871 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12872 }
12873 }
12874 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
12875
12876 /*
12877 * Annoying double switch here.
12878 * Using ugly macro for implementing the cases, sharing it with stosb.
12879 */
12880 switch (pVCpu->iem.s.enmEffOpSize)
12881 {
12882 case IEMMODE_16BIT:
12883 switch (pVCpu->iem.s.enmEffAddrMode)
12884 {
12885 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
12886 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
12887 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
12888 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12889 }
12890 break;
12891
12892 case IEMMODE_32BIT:
12893 switch (pVCpu->iem.s.enmEffAddrMode)
12894 {
12895 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
12896 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
12897 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
12898 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12899 }
12900 break;
12901
12902 case IEMMODE_64BIT:
12903 switch (pVCpu->iem.s.enmEffAddrMode)
12904 {
12905 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12906 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
12907 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
12908 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12909 }
12910 break;
12911 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12912 }
12913 return VINF_SUCCESS;
12914}
12915
12916#undef IEM_STOS_CASE
12917
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the non-repeating LODS body for one operand-size/address-size
 * combination: loads ValBits bits from [iEffSeg:xSI] into the low part of
 * xAX, then steps xSI by ValBits/8 in the direction dictated by EFLAGS.DF.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
12933
/**
 * Opcode 0xac - lodsb AL,Xb (byte string load).
 *
 * Loads the byte at [iEffSeg:xSI] into AL and steps xSI per EFLAGS.DF.
 * REP-prefixed forms (REPNZ and REPZ both act as plain REP) defer to
 * iemCImpl_lods_al_*.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.  (Comment previously
     * said stos[wdq] - copy/paste leftover.)
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12967
12968
12969/** Opcode 0xad. */
12970FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
12971{
12972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12973
12974 /*
12975 * Use the C implementation if a repeat prefix is encountered.
12976 */
12977 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12978 {
12979 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
12980 switch (pVCpu->iem.s.enmEffOpSize)
12981 {
12982 case IEMMODE_16BIT:
12983 switch (pVCpu->iem.s.enmEffAddrMode)
12984 {
12985 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
12986 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
12987 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
12988 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12989 }
12990 break;
12991 case IEMMODE_32BIT:
12992 switch (pVCpu->iem.s.enmEffAddrMode)
12993 {
12994 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
12995 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
12996 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
12997 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12998 }
12999 case IEMMODE_64BIT:
13000 switch (pVCpu->iem.s.enmEffAddrMode)
13001 {
13002 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
13003 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
13004 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
13005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13006 }
13007 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13008 }
13009 }
13010 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
13011
13012 /*
13013 * Annoying double switch here.
13014 * Using ugly macro for implementing the cases, sharing it with lodsb.
13015 */
13016 switch (pVCpu->iem.s.enmEffOpSize)
13017 {
13018 case IEMMODE_16BIT:
13019 switch (pVCpu->iem.s.enmEffAddrMode)
13020 {
13021 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
13022 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
13023 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
13024 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13025 }
13026 break;
13027
13028 case IEMMODE_32BIT:
13029 switch (pVCpu->iem.s.enmEffAddrMode)
13030 {
13031 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
13032 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
13033 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
13034 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13035 }
13036 break;
13037
13038 case IEMMODE_64BIT:
13039 switch (pVCpu->iem.s.enmEffAddrMode)
13040 {
13041 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
13042 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
13043 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
13044 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13045 }
13046 break;
13047 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13048 }
13049 return VINF_SUCCESS;
13050}
13051
13052#undef IEM_LODS_CASE
13053
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the non-repeating SCAS body for one operand-size/address-size
 * combination: compares the low ValBits bits of xAX with the value at
 * ES:[xDI] via iemAImpl_cmp_u<ValBits> (updating EFLAGS), then steps xDI by
 * ValBits/8 in the direction dictated by EFLAGS.DF.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax,   0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
13075
13076/** Opcode 0xae. */
13077FNIEMOP_DEF(iemOp_scasb_AL_Xb)
13078{
13079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13080
13081 /*
13082 * Use the C implementation if a repeat prefix is encountered.
13083 */
13084 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
13085 {
13086 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
13087 switch (pVCpu->iem.s.enmEffAddrMode)
13088 {
13089 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
13090 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
13091 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
13092 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13093 }
13094 }
13095 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
13096 {
13097 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
13098 switch (pVCpu->iem.s.enmEffAddrMode)
13099 {
13100 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
13101 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
13102 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
13103 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13104 }
13105 }
13106 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
13107
13108 /*
13109 * Sharing case implementation with stos[wdq] below.
13110 */
13111 switch (pVCpu->iem.s.enmEffAddrMode)
13112 {
13113 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
13114 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
13115 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
13116 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13117 }
13118 return VINF_SUCCESS;
13119}
13120
13121
13122/** Opcode 0xaf. */
13123FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
13124{
13125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13126
13127 /*
13128 * Use the C implementation if a repeat prefix is encountered.
13129 */
13130 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
13131 {
13132 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
13133 switch (pVCpu->iem.s.enmEffOpSize)
13134 {
13135 case IEMMODE_16BIT:
13136 switch (pVCpu->iem.s.enmEffAddrMode)
13137 {
13138 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
13139 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
13140 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
13141 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13142 }
13143 break;
13144 case IEMMODE_32BIT:
13145 switch (pVCpu->iem.s.enmEffAddrMode)
13146 {
13147 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
13148 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
13149 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
13150 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13151 }
13152 case IEMMODE_64BIT:
13153 switch (pVCpu->iem.s.enmEffAddrMode)
13154 {
13155 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
13156 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
13157 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
13158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13159 }
13160 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13161 }
13162 }
13163 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
13164 {
13165 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
13166 switch (pVCpu->iem.s.enmEffOpSize)
13167 {
13168 case IEMMODE_16BIT:
13169 switch (pVCpu->iem.s.enmEffAddrMode)
13170 {
13171 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
13172 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
13173 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
13174 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13175 }
13176 break;
13177 case IEMMODE_32BIT:
13178 switch (pVCpu->iem.s.enmEffAddrMode)
13179 {
13180 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
13181 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
13182 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
13183 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13184 }
13185 case IEMMODE_64BIT:
13186 switch (pVCpu->iem.s.enmEffAddrMode)
13187 {
13188 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
13189 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
13190 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
13191 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13192 }
13193 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13194 }
13195 }
13196 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
13197
13198 /*
13199 * Annoying double switch here.
13200 * Using ugly macro for implementing the cases, sharing it with scasb.
13201 */
13202 switch (pVCpu->iem.s.enmEffOpSize)
13203 {
13204 case IEMMODE_16BIT:
13205 switch (pVCpu->iem.s.enmEffAddrMode)
13206 {
13207 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
13208 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
13209 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
13210 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13211 }
13212 break;
13213
13214 case IEMMODE_32BIT:
13215 switch (pVCpu->iem.s.enmEffAddrMode)
13216 {
13217 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
13218 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
13219 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
13220 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13221 }
13222 break;
13223
13224 case IEMMODE_64BIT:
13225 switch (pVCpu->iem.s.enmEffAddrMode)
13226 {
13227 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
13228 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
13229 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
13230 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13231 }
13232 break;
13233 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13234 }
13235 return VINF_SUCCESS;
13236}
13237
13238#undef IEM_SCAS_CASE
13239
/**
 * Common worker for 'mov r8,imm8' (opcodes 0xb0 thru 0xb7).
 *
 * Fetches the byte immediate and stores it into the given 8-bit register.
 *
 * @param   iReg    The register index, including any REX.B extension
 *                  (the callers OR in pVCpu->iem.s.uRexB).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13256
13257
/** Opcode 0xb0 - mov AL,Ib (register index 0, extended by REX.B). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
13264
13265
/** Opcode 0xb1 - mov CL,Ib.  (Function name lacks the 'mov_' prefix used by
 *  its 0xb0 sibling; kept as-is since the opcode table references it.) */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
13272
13273
/** Opcode 0xb2 - mov DL,Ib. */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
13280
13281
/** Opcode 0xb3 - mov BL,Ib. */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
13288
13289
/** Opcode 0xb4 - mov AH,Ib.  Register encoding 4 (X86_GREG_xSP); presumably
 *  the U8 register accessor resolves this to AH vs SPL depending on REX -
 *  handled outside this view, verify in IEM_MC_STORE_GREG_U8. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
13296
13297
/** Opcode 0xb5 - mov CH,Ib (register encoding 5, X86_GREG_xBP). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
13304
13305
/** Opcode 0xb6 - mov DH,Ib (register encoding 6, X86_GREG_xSI). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
13312
13313
/** Opcode 0xb7 - mov BH,Ib (register encoding 7, X86_GREG_xDI). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
13320
13321
13322/**
13323 * Common 'mov regX,immX' helper.
13324 */
13325FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
13326{
13327 switch (pVCpu->iem.s.enmEffOpSize)
13328 {
13329 case IEMMODE_16BIT:
13330 {
13331 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13333
13334 IEM_MC_BEGIN(0, 1);
13335 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
13336 IEM_MC_STORE_GREG_U16(iReg, u16Value);
13337 IEM_MC_ADVANCE_RIP();
13338 IEM_MC_END();
13339 break;
13340 }
13341
13342 case IEMMODE_32BIT:
13343 {
13344 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13346
13347 IEM_MC_BEGIN(0, 1);
13348 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
13349 IEM_MC_STORE_GREG_U32(iReg, u32Value);
13350 IEM_MC_ADVANCE_RIP();
13351 IEM_MC_END();
13352 break;
13353 }
13354 case IEMMODE_64BIT:
13355 {
13356 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
13357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13358
13359 IEM_MC_BEGIN(0, 1);
13360 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
13361 IEM_MC_STORE_GREG_U64(iReg, u64Value);
13362 IEM_MC_ADVANCE_RIP();
13363 IEM_MC_END();
13364 break;
13365 }
13366 }
13367
13368 return VINF_SUCCESS;
13369}
13370
13371
/** Opcode 0xb8. */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    /* 'mov rAX,Iv' - immediate of effective operand size into AX/EAX/RAX (R8 with REX.B). */
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
13378
13379
/** Opcode 0xb9. */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    /* 'mov rCX,Iv' - immediate of effective operand size into CX/ECX/RCX (R9 with REX.B). */
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
13386
13387
/** Opcode 0xba. */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    /* 'mov rDX,Iv' - immediate of effective operand size into DX/EDX/RDX (R10 with REX.B). */
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
13394
13395
/** Opcode 0xbb. */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    /* 'mov rBX,Iv' - immediate of effective operand size into BX/EBX/RBX (R11 with REX.B). */
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
13402
13403
/** Opcode 0xbc. */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    /* 'mov rSP,Iv' - immediate of effective operand size into SP/ESP/RSP (R12 with REX.B). */
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
13410
13411
/** Opcode 0xbd. */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    /* 'mov rBP,Iv' - immediate of effective operand size into BP/EBP/RBP (R13 with REX.B). */
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
13418
13419
/** Opcode 0xbe. */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    /* 'mov rSI,Iv' - immediate of effective operand size into SI/ESI/RSI (R14 with REX.B). */
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
13426
13427
/** Opcode 0xbf. */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    /* 'mov rDI,Iv' - immediate of effective operand size into DI/EDI/RDI (R15 with REX.B). */
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
13434
13435
/**
 * Opcode 0xc0.
 *
 * Group 2 byte-sized shift/rotate with an immediate count: rol/ror/rcl/rcr/
 * shl/shr/sar Eb,Ib.  The ModRM reg field selects the operation; /6 is
 * unassigned and raises \#UD.  Requires an 80186 or later.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are treated as undefined by the verifier for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Note the decode order: effective address first (the '1' indicates
           one immediate byte still follows), then the Ib shift count. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13495
13496
/**
 * Opcode 0xc1.
 *
 * Group 2 word/dword/qword shift/rotate with an immediate count:
 * rol/ror/rcl/rcr/shl/shr/sar Ev,Ib.  The ModRM reg field selects the
 * operation; /6 is unassigned and raises \#UD.  Requires an 80186 or later.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are treated as undefined by the verifier for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first ('1' = one immediate byte follows), then Ib. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13636
13637
/** Opcode 0xc2. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    /* 'retn Iw' - near return, additionally popping Iw bytes off the stack.
       Operand size defaults to 64-bit in long mode; the heavy lifting is
       deferred to iemCImpl_retn. */
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
13647
13648
/** Opcode 0xc3. */
FNIEMOP_DEF(iemOp_retn)
{
    /* 'retn' - near return; same as 0xc2 but popping zero extra bytes. */
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
13657
13658
/**
 * Opcode 0xc4.
 *
 * Either 'les Gv,Mp' (legacy/compat mode with a memory operand) or the first
 * byte of a 2-byte VEX prefix (64-bit mode, or MOD=3 elsewhere).  The VEX
 * path is not implemented yet and currently raises \#UD.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
        /* The LES instruction is invalid in 64-bit mode.  In legacy and
           compatibility mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
13679
13680
/**
 * Opcode 0xc5.
 *
 * Either 'lds Gv,Mp' (legacy/compat mode with a memory operand) or the first
 * byte of a 3-byte VEX prefix.  The VEX path consumes the two VEX payload
 * bytes and the opcode byte but is not implemented yet - it currently
 * raises \#UD.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode.  VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
    /** @todo Test when exactly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
13718
13719
/**
 * Opcode 0xc6.
 *
 * Group 11: only /0 ('mov Eb,Ib') is assigned; all other reg-field values
 * raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* Effective address first ('1' = one immediate byte follows), then Ib. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13752
13753
/**
 * Opcode 0xc7.
 *
 * Group 11: only /0 ('mov Ev,Iz') is assigned; all other reg-field values
 * raise \#UD.  In 64-bit operand size the immediate is a sign-extended
 * 32-bit value (Iz), not a full 64-bit one.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* Sign-extended 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Effective address first ('2' = two immediate bytes follow), then Iw. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13839
13840
13841
13842
/** Opcode 0xc8. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    /* 'enter Iw,Ib' - stack frame setup; Iw = frame size, Ib = nesting level.
       80186+, operand size defaults to 64-bit in long mode; the actual work
       is deferred to iemCImpl_enter. */
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t u8NestingLevel;  IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
13854
13855
/** Opcode 0xc9. */
FNIEMOP_DEF(iemOp_leave)
{
    /* 'leave' - tear down the stack frame set up by 'enter'.  80186+,
       operand size defaults to 64-bit in long mode; deferred to
       iemCImpl_leave. */
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
13865
13866
/** Opcode 0xca. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    /* 'retf Iw' - far return, additionally popping Iw bytes off the stack;
       deferred to iemCImpl_retf. */
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
13876
13877
/** Opcode 0xcb. */
FNIEMOP_DEF(iemOp_retf)
{
    /* 'retf' - far return; same as 0xca but popping zero extra bytes. */
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
13886
13887
13888/** Opcode 0xcc. */
13889FNIEMOP_DEF(iemOp_int_3)
13890{
13891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13892 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
13893}
13894
13895
13896/** Opcode 0xcd. */
13897FNIEMOP_DEF(iemOp_int_Ib)
13898{
13899 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
13900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13901 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
13902}
13903
13904
/** Opcode 0xce. */
FNIEMOP_DEF(iemOp_into)
{
    /* 'into' - interrupt on overflow; invalid in 64-bit mode.  Invokes
       iemCImpl_int with vector \#OF; presumably the OF-flag check happens
       inside the CIMPL worker — confirm there (NOTE(review)). */
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13918
13919
/** Opcode 0xcf. */
FNIEMOP_DEF(iemOp_iret)
{
    /* 'iret' - interrupt return; deferred to iemCImpl_iret with the current
       effective operand size. */
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
13927
13928
/**
 * Opcode 0xd0.
 *
 * Group 2 byte-sized shift/rotate by a fixed count of 1:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,1.  /6 is unassigned and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are treated as undefined by the verifier for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* No immediate follows, hence the 0. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13984
13985
13986
/**
 * Opcode 0xd1.
 *
 * Group 2 word/dword/qword shift/rotate by a fixed count of 1:
 * rol/ror/rcl/rcr/shl/shr/sar Ev,1.  /6 is unassigned and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are treated as undefined by the verifier for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* No immediate follows, hence the 0. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14118
14119
/**
 * Opcode 0xd2.
 *
 * Group 2 byte-sized shift/rotate with the count taken from CL:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,CL.  /6 is unassigned and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are treated as undefined by the verifier for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        /* Shift count comes from CL. */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* No immediate follows, hence the 0. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
14177
14178
14179/** Opcode 0xd3. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    /*
     * Opcode 0xd3: group 2 rotate/shift of Ev (16/32/64-bit r/m operand) by CL.
     * The ModR/M reg field selects the operation; /6 is an invalid encoding
     * and raises #UD.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are treated as undefined outputs in verification mode. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination: operate directly on the general register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination: map the operand read/write and commit afterwards. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14316
/** Opcode 0xd4. AAM - ASCII adjust AX after multiply; Ib is the divisor base. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* invalid in 64-bit mode */
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* aam 0 -> #DE */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
14328
14329
/** Opcode 0xd5. AAD - ASCII adjust AX before division; Ib is the multiplier base. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* invalid in 64-bit mode */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
14339
14340
14341/** Opcode 0xd6. */
14342FNIEMOP_DEF(iemOp_salc)
14343{
14344 IEMOP_MNEMONIC(salc, "salc");
14345 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
14346 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14348 IEMOP_HLP_NO_64BIT();
14349
14350 IEM_MC_BEGIN(0, 0);
14351 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
14352 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
14353 } IEM_MC_ELSE() {
14354 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
14355 } IEM_MC_ENDIF();
14356 IEM_MC_ADVANCE_RIP();
14357 IEM_MC_END();
14358 return VINF_SUCCESS;
14359}
14360
14361
/** Opcode 0xd7. XLAT/XLATB - AL = [xBX + AL], address width per effective mode. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            /* NOTE(review): IEM_MC_BEGIN(2, 0) vs. two locals/zero args here looks
             * transposed relative to the (cArgs, cLocals) pattern used elsewhere;
             * presumably benign — confirm against the IEM_MC checker. */
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* index = AL, zero extended */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX); /* + BX */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX); /* + EBX */
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX); /* + RBX */
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14408
14409
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM/\#MF as appropriate; on an empty source/destination register the
 * FPU stack-underflow path is taken instead of calling the assembly worker.
 *
 * @param   bRm         The ModR/M byte; the r/m field selects ST(n).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result goes into ST(0) */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14440
14441
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (FSW); no register is written (e.g. FCOM).
 *
 * @param   bRm         The ModR/M byte; the r/m field selects ST(n).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX = no destination register involved in the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14472
14473
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the stack when done (e.g. FCOMP).
 *
 * @param   bRm         The ModR/M byte; the r/m field selects ST(n).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw); /* pop happens even on masked exceptions */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14504
14505
/** Opcode 0xd8 11/0. ST(0) = ST(0) + ST(n); dispatched via the st0/stN worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
14512
14513
/** Opcode 0xd8 11/1. ST(0) = ST(0) * ST(n); dispatched via the st0/stN worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
14520
14521
/** Opcode 0xd8 11/2. Compare ST(0) with ST(n), FSW only (no store, no pop). */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
14528
14529
/** Opcode 0xd8 11/3. Compare ST(0) with ST(n) and pop; same worker as fcom + pop. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
14536
14537
/** Opcode 0xd8 11/4. ST(0) = ST(0) - ST(n). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
14544
14545
/** Opcode 0xd8 11/5. Reversed subtract: ST(0) = ST(n) - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
14552
14553
/** Opcode 0xd8 11/6. ST(0) = ST(0) / ST(n). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
14560
14561
/** Opcode 0xd8 11/7. Reversed divide: ST(0) = ST(n) / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
14568
14569
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * Unlike the register-form worker, this one finishes decoding itself (it must
 * first compute the effective address from the ModR/M byte).
 *
 * @param   bRm         The ModR/M byte (memory-form encoding).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 32-bit real operand before touching FPU state. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result goes into ST(0) */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14605
14606
/** Opcode 0xd8 !11/0. ST(0) = ST(0) + m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
14613
14614
/** Opcode 0xd8 !11/1. ST(0) = ST(0) * m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
14621
14622
/** Opcode 0xd8 !11/2. Compare ST(0) with m32real; updates FSW only, no store. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* FSW update also records FDP/FDS (the memory operand pointer). */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14655
14656
/** Opcode 0xd8 !11/3. Compare ST(0) with m32real and pop; fcom_m32r + pop. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Same as fcom_m32r except the stack is popped afterwards. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14689
14690
/** Opcode 0xd8 !11/4. ST(0) = ST(0) - m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
14697
14698
/** Opcode 0xd8 !11/5. Reversed subtract: ST(0) = m32real - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
14705
14706
/** Opcode 0xd8 !11/6. ST(0) = ST(0) / m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
14713
14714
/** Opcode 0xd8 !11/7. Reversed divide: ST(0) = m32real / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
14721
14722
/** Opcode 0xd8 - first FPU escape byte; dispatches on ModR/M mod and reg fields. */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of the escape byte + ModR/M)
       for FOP reporting. 0xd8 & 0x7 == 0, so the high byte is zero here. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms: operate on ST(0) and ST(rm). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: operate on ST(0) and a 32-bit real memory operand. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14760
14761
/** Opcode 0xd9 /0 mem32real - push m32real onto the FPU stack as an 80-bit value.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push lands in ST(7) of the pre-push stack; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14794
14795
/** Opcode 0xd9 !11/2 mem32real - store ST(0) to m32real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for write so memory faults are raised up front. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store the indefinite QNaN only if #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14830
14831
/** Opcode 0xd9 !11/3 - store ST(0) to m32real and pop. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        /* Identical to fst_m32r except the stack is popped after the store. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store the indefinite QNaN only if #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14866
14867
/** Opcode 0xd9 !11/4 - FLDENV: load FPU environment (14 or 28 bytes,
 *  depending on the effective operand size); heavy lifting done in CIMPL. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
14885
14886
14887/** Opcode 0xd9 !11/5 */
14888FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
14889{
14890 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
14891 IEM_MC_BEGIN(1, 1);
14892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14893 IEM_MC_ARG(uint16_t, u16Fsw, 0);
14894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14896 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14897 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14898 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14899 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
14900 IEM_MC_END();
14901 return VINF_SUCCESS;
14902}
14903
14904
14905/** Opcode 0xd9 !11/6 */
14906FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
14907{
14908 IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
14909 IEM_MC_BEGIN(3, 0);
14910 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14911 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14912 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
14913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14915 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14916 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14917 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14918 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
14919 IEM_MC_END();
14920 return VINF_SUCCESS;
14921}
14922
14923
/** Opcode 0xd9 !11/7 - FNSTCW: store the FPU control word to m2byte. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    /* NOTE(review): IEM_MC_BEGIN(2, 0) with two locals and no args looks
     * transposed vs. the (cArgs, cLocals) pattern used elsewhere; presumably
     * benign — confirm against the IEM_MC checker. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
14941
14942
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. FNOP - no arithmetic, but still
 *  updates FPU opcode/instruction pointer state. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
14960
14961
/** Opcode 0xd9 11/0 stN - FLD ST(n): push a copy of ST(n) onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value); /* copy source, clean FSW */
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
14989
14990
/** Opcode 0xd9 11/3 stN - FXCH: exchange ST(0) and ST(n). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: ST(n) into ST(0) (with C1 cleared via the FSW value here),
           old ST(0) into ST(n). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* Underflow handling (empty register) is delegated to CIMPL. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
15021
15022
/** Opcode 0xd9 11/4, 0xdd 11/2. FSTP ST(n): copy ST(0) to ST(n), then pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: nothing to copy, just pop. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Copy ST(0) into ST(iDstReg), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
15069
15070
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators (e.g. FCHS, FABS).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result replaces ST(0) */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15100
15101
/** Opcode 0xd9 0xe0. FCHS - negate the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
15108
15109
/** Opcode 0xd9 0xe1. FABS - clear the sign of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
15116
15117
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW
 * (e.g. FTST, FXAM); no register is modified.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX = no destination register involved in the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15146
15147
/** Opcode 0xd9 0xe4. FTST - compare ST(0) against +0.0, setting condition codes. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
15154
15155
/** Opcode 0xd9 0xe5. FXAM - classify the value in ST(0) via condition codes. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
15162
15163
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack
 * (FLD1, FLDL2T, FLDL2E, FLDPI, FLDLG2, FLDLN2, FLDZ).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push lands in ST(7) of the pre-push stack; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15191
15192
15193/** Opcode 0xd9 0xe8 - FLD1: push +1.0 onto the FPU stack. */
15194FNIEMOP_DEF(iemOp_fld1)
15195{
15196    IEMOP_MNEMONIC(fld1, "fld1");
15197    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
15198}
15199
15200
15201/** Opcode 0xd9 0xe9 - FLDL2T: push log2(10) onto the FPU stack. */
15202FNIEMOP_DEF(iemOp_fldl2t)
15203{
15204    IEMOP_MNEMONIC(fldl2t, "fldl2t");
15205    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
15206}
15207
15208
15209/** Opcode 0xd9 0xea - FLDL2E: push log2(e) onto the FPU stack. */
15210FNIEMOP_DEF(iemOp_fldl2e)
15211{
15212    IEMOP_MNEMONIC(fldl2e, "fldl2e");
15213    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
15214}
15215
15216/** Opcode 0xd9 0xeb - FLDPI: push pi onto the FPU stack. */
15217FNIEMOP_DEF(iemOp_fldpi)
15218{
15219    IEMOP_MNEMONIC(fldpi, "fldpi");
15220    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
15221}
15222
15223
15224/** Opcode 0xd9 0xec - FLDLG2: push log10(2) onto the FPU stack. */
15225FNIEMOP_DEF(iemOp_fldlg2)
15226{
15227    IEMOP_MNEMONIC(fldlg2, "fldlg2");
15228    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
15229}
15230
15231/** Opcode 0xd9 0xed - FLDLN2: push ln(2) onto the FPU stack. */
15232FNIEMOP_DEF(iemOp_fldln2)
15233{
15234    IEMOP_MNEMONIC(fldln2, "fldln2");
15235    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
15236}
15237
15238
15239/** Opcode 0xd9 0xee - FLDZ: push +0.0 onto the FPU stack. */
15240FNIEMOP_DEF(iemOp_fldz)
15241{
15242    IEMOP_MNEMONIC(fldz, "fldz");
15243    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
15244}
15245
15246
15247/** Opcode 0xd9 0xf0 - F2XM1: compute 2^ST(0) - 1, storing into ST(0). */
15248FNIEMOP_DEF(iemOp_f2xm1)
15249{
15250    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
15251    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
15252}
15253
15254
15255/**
15256 * Common worker for FPU instructions working on STn and ST0, storing the result
15257 * in STn, and popping the stack unless IE, DE or ZE was raised.
15258 *
15259 * Used by FYL2X, FPATAN and FYL2XP1 (all with STn = ST1).
15260 *
15261 * @param   bRm         The ModR/M byte; low three bits select STn.
15262 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
15263 */
15264FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15265{
15266    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15267
15268    IEM_MC_BEGIN(3, 1);
15269    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
15270    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,        FpuRes,     0);
15271    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,                 1);
15272    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value2,                 2);
15273
15274    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15275    IEM_MC_MAYBE_RAISE_FPU_XCPT();
15276
15277    IEM_MC_PREPARE_FPU_USAGE();
15278    /* Both STn (first operand) and ST(0) (second operand) must be present. */
15279    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15280        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15281        /* Store into STn and pop; the macro skips the pop on unmasked IE/DE/ZE. */
15282        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
15283    IEM_MC_ELSE()
15284        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
15285    IEM_MC_ENDIF();
15286    IEM_MC_ADVANCE_RIP();
15287
15288    IEM_MC_END();
15289    return VINF_SUCCESS;
15290}
15286
15287
15288/** Opcode 0xd9 0xf1 - FYL2X: ST(1) = ST(1) * log2(ST(0)); pop. */
15289FNIEMOP_DEF(iemOp_fyl2x)
15290{
15291    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
15292    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
15293}
15294
15295
15296/**
15297 * Common worker for FPU instructions working on ST0 and having two outputs, one
15298 * replacing ST0 and one pushed onto the stack.
15299 *
15300 * Used by FPTAN, FXTRACT and FSINCOS.
15301 *
15302 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
15303 */
15304FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
15305{
15306    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15307
15308    IEM_MC_BEGIN(2, 1);
15309    IEM_MC_LOCAL(IEMFPURESULTTWO,           FpuResTwo);
15310    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO,  pFpuResTwo, FpuResTwo,  0);
15311    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value,              1);
15312
15313    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15314    IEM_MC_MAYBE_RAISE_FPU_XCPT();
15315    IEM_MC_PREPARE_FPU_USAGE();
15316    /* Source is ST(0); the helper produces two results that are committed
15317       as a replace-ST(0)-and-push pair. */
15318    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15319        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
15320        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
15321    IEM_MC_ELSE()
15322        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
15323    IEM_MC_ENDIF();
15324    IEM_MC_ADVANCE_RIP();
15325
15326    IEM_MC_END();
15327    return VINF_SUCCESS;
15328}
15325
15326
15327/** Opcode 0xd9 0xf2 - FPTAN: ST(0) = tan(ST(0)), then push 1.0. */
15328FNIEMOP_DEF(iemOp_fptan)
15329{
15330    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
15331    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
15332}
15333
15334
15335/** Opcode 0xd9 0xf3 - FPATAN: ST(1) = arctan(ST(1)/ST(0)); pop. */
15336FNIEMOP_DEF(iemOp_fpatan)
15337{
15338    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
15339    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
15340}
15341
15342
15343/** Opcode 0xd9 0xf4 - FXTRACT: split ST(0) into exponent (ST(0)) and pushed significand. */
15344FNIEMOP_DEF(iemOp_fxtract)
15345{
15346    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
15347    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
15348}
15349
15350
15351/** Opcode 0xd9 0xf5 - FPREM1: IEEE partial remainder of ST(0) / ST(1). */
15352FNIEMOP_DEF(iemOp_fprem1)
15353{
15354    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
15355    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
15356}
15357
15358
15359/** Opcode 0xd9 0xf6 - FDECSTP: decrement the FPU top-of-stack pointer. */
15360FNIEMOP_DEF(iemOp_fdecstp)
15361{
15362    IEMOP_MNEMONIC(fdecstp, "fdecstp");
15363    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15364    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
15365    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
15366     *        FINCSTP and FDECSTP. */
15367
15368    IEM_MC_BEGIN(0,0);
15369
15370    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15371    IEM_MC_MAYBE_RAISE_FPU_XCPT();
15372
15373    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15374    /* Only TOP moves; no register content or tag validity changes. */
15375    IEM_MC_FPU_STACK_DEC_TOP();
15376    IEM_MC_UPDATE_FSW_CONST(0);
15377
15378    IEM_MC_ADVANCE_RIP();
15379    IEM_MC_END();
15380    return VINF_SUCCESS;
15381}
15381
15382
15383/** Opcode 0xd9 0xf7 - FINCSTP: increment the FPU top-of-stack pointer. */
15384FNIEMOP_DEF(iemOp_fincstp)
15385{
15386    IEMOP_MNEMONIC(fincstp, "fincstp");
15387    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15388    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
15389    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
15390     *        FINCSTP and FDECSTP. */
15391
15392    IEM_MC_BEGIN(0,0);
15393
15394    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15395    IEM_MC_MAYBE_RAISE_FPU_XCPT();
15396
15397    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15398    /* Only TOP moves; no register content or tag validity changes. */
15399    IEM_MC_FPU_STACK_INC_TOP();
15400    IEM_MC_UPDATE_FSW_CONST(0);
15401
15402    IEM_MC_ADVANCE_RIP();
15403    IEM_MC_END();
15404    return VINF_SUCCESS;
15405}
15405
15406
15407/** Opcode 0xd9 0xf8 - FPREM: partial remainder (truncating) of ST(0) / ST(1). */
15408FNIEMOP_DEF(iemOp_fprem)
15409{
15410    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
15411    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
15412}
15413
15414
15415/** Opcode 0xd9 0xf9 - FYL2XP1: ST(1) = ST(1) * log2(ST(0) + 1); pop. */
15416FNIEMOP_DEF(iemOp_fyl2xp1)
15417{
15418    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
15419    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
15420}
15421
15422
15423/** Opcode 0xd9 0xfa - FSQRT: ST(0) = sqrt(ST(0)). */
15424FNIEMOP_DEF(iemOp_fsqrt)
15425{
15426    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
15427    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
15428}
15429
15430
15431/** Opcode 0xd9 0xfb - FSINCOS: ST(0) = sin(ST(0)), then push cos of the original value. */
15432FNIEMOP_DEF(iemOp_fsincos)
15433{
15434    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
15435    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
15436}
15437
15438
15439/** Opcode 0xd9 0xfc - FRNDINT: round ST(0) to an integer per the RC field. */
15440FNIEMOP_DEF(iemOp_frndint)
15441{
15442    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
15443    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
15444}
15445
15446
15447/** Opcode 0xd9 0xfd - FSCALE: scale ST(0) by 2^trunc(ST(1)). */
15448FNIEMOP_DEF(iemOp_fscale)
15449{
15450    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
15451    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
15452}
15453
15454
15455/** Opcode 0xd9 0xfe - FSIN: ST(0) = sin(ST(0)). */
15456FNIEMOP_DEF(iemOp_fsin)
15457{
15458    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
15459    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
15460}
15461
15462
15463/** Opcode 0xd9 0xff - FCOS: ST(0) = cos(ST(0)). */
15464FNIEMOP_DEF(iemOp_fcos)
15465{
15466    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
15467    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
15468}
15469
15470
15471/** Used by iemOp_EscF1.
15472 * Dispatch table for the register-form 0xd9 opcodes with ModR/M bytes
15473 * 0xe0 thru 0xff (i.e. mod=3, reg=4..7); indexed by (bRm - 0xe0). */
15474IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
15475{
15476    /* 0xe0 */  iemOp_fchs,
15477    /* 0xe1 */  iemOp_fabs,
15478    /* 0xe2 */  iemOp_Invalid,
15479    /* 0xe3 */  iemOp_Invalid,
15480    /* 0xe4 */  iemOp_ftst,
15481    /* 0xe5 */  iemOp_fxam,
15482    /* 0xe6 */  iemOp_Invalid,
15483    /* 0xe7 */  iemOp_Invalid,
15484    /* 0xe8 */  iemOp_fld1,
15485    /* 0xe9 */  iemOp_fldl2t,
15486    /* 0xea */  iemOp_fldl2e,
15487    /* 0xeb */  iemOp_fldpi,
15488    /* 0xec */  iemOp_fldlg2,
15489    /* 0xed */  iemOp_fldln2,
15490    /* 0xee */  iemOp_fldz,
15491    /* 0xef */  iemOp_Invalid,
15492    /* 0xf0 */  iemOp_f2xm1,
15493    /* 0xf1 */  iemOp_fyl2x,
15494    /* 0xf2 */  iemOp_fptan,
15495    /* 0xf3 */  iemOp_fpatan,
15496    /* 0xf4 */  iemOp_fxtract,
15497    /* 0xf5 */  iemOp_fprem1,
15498    /* 0xf6 */  iemOp_fdecstp,
15499    /* 0xf7 */  iemOp_fincstp,
15500    /* 0xf8 */  iemOp_fprem,
15501    /* 0xf9 */  iemOp_fyl2xp1,
15502    /* 0xfa */  iemOp_fsqrt,
15503    /* 0xfb */  iemOp_fsincos,
15504    /* 0xfc */  iemOp_frndint,
15505    /* 0xfd */  iemOp_fscale,
15506    /* 0xfe */  iemOp_fsin,
15507    /* 0xff */  iemOp_fcos
15508};
15507
15508
15509/** Opcode 0xd9 - escape to x87; dispatches on the ModR/M byte. */
15510FNIEMOP_DEF(iemOp_EscF1)
15511{
15512    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15513    /* Record the low 11 opcode bits for the FOP field (0xd9 -> 1). */
15514    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
15515
15516    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15517    {
15518        /* Register form: reg field selects the instruction group. */
15519        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15520        {
15521            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
15522            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
15523            case 2:
15524                if (bRm == 0xd0)
15525                    return FNIEMOP_CALL(iemOp_fnop);
15526                return IEMOP_RAISE_INVALID_OPCODE();
15527            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
15528            case 4:
15529            case 5:
15530            case 6:
15531            case 7:
15532                /* 0xe0..0xff are individual no-operand instructions; see table. */
15533                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
15534                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
15535            IEM_NOT_REACHED_DEFAULT_CASE_RET();
15536        }
15537    }
15538    else
15539    {
15540        /* Memory form: reg field selects the memory operation. */
15541        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15542        {
15543            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
15544            case 1: return IEMOP_RAISE_INVALID_OPCODE();
15545            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
15546            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
15547            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
15548            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
15549            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
15550            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
15551            IEM_NOT_REACHED_DEFAULT_CASE_RET();
15552        }
15553    }
15554}
15551
15552
15553/** Opcode 0xda 11/0 - FCMOVB: copy ST(i) to ST(0) if CF=1 (below). */
15554FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
15555{
15556    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
15557    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15558
15559    IEM_MC_BEGIN(0, 1);
15560    IEM_MC_LOCAL(PCRTFLOAT80U,              pr80ValueN);
15561
15562    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15563    IEM_MC_MAYBE_RAISE_FPU_XCPT();
15564
15565    IEM_MC_PREPARE_FPU_USAGE();
15566    /* Both ST(i) and ST(0) must be valid; only then is the condition tested. */
15567    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15568        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
15569            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15570        IEM_MC_ENDIF();
15571        IEM_MC_UPDATE_FPU_OPCODE_IP();
15572    IEM_MC_ELSE()
15573        IEM_MC_FPU_STACK_UNDERFLOW(0);
15574    IEM_MC_ENDIF();
15575    IEM_MC_ADVANCE_RIP();
15576
15577    IEM_MC_END();
15578    return VINF_SUCCESS;
15579}
15579
15580
15581/** Opcode 0xda 11/1 - FCMOVE: copy ST(i) to ST(0) if ZF=1 (equal). */
15582FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
15583{
15584    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
15585    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15586
15587    IEM_MC_BEGIN(0, 1);
15588    IEM_MC_LOCAL(PCRTFLOAT80U,              pr80ValueN);
15589
15590    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15591    IEM_MC_MAYBE_RAISE_FPU_XCPT();
15592
15593    IEM_MC_PREPARE_FPU_USAGE();
15594    /* Both ST(i) and ST(0) must be valid; only then is the condition tested. */
15595    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15596        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
15597            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15598        IEM_MC_ENDIF();
15599        IEM_MC_UPDATE_FPU_OPCODE_IP();
15600    IEM_MC_ELSE()
15601        IEM_MC_FPU_STACK_UNDERFLOW(0);
15602    IEM_MC_ENDIF();
15603    IEM_MC_ADVANCE_RIP();
15604
15605    IEM_MC_END();
15606    return VINF_SUCCESS;
15607}
15607
15608
15609/** Opcode 0xda 11/2 - FCMOVBE: copy ST(i) to ST(0) if CF=1 or ZF=1 (below or equal). */
15610FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
15611{
15612    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
15613    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15614
15615    IEM_MC_BEGIN(0, 1);
15616    IEM_MC_LOCAL(PCRTFLOAT80U,              pr80ValueN);
15617
15618    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15619    IEM_MC_MAYBE_RAISE_FPU_XCPT();
15620
15621    IEM_MC_PREPARE_FPU_USAGE();
15622    /* Both ST(i) and ST(0) must be valid; only then is the condition tested. */
15623    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15624        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
15625            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15626        IEM_MC_ENDIF();
15627        IEM_MC_UPDATE_FPU_OPCODE_IP();
15628    IEM_MC_ELSE()
15629        IEM_MC_FPU_STACK_UNDERFLOW(0);
15630    IEM_MC_ENDIF();
15631    IEM_MC_ADVANCE_RIP();
15632
15633    IEM_MC_END();
15634    return VINF_SUCCESS;
15635}
15635
15636
15637/** Opcode 0xda 11/3 - FCMOVU: copy ST(i) to ST(0) if PF=1 (unordered). */
15638FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
15639{
15640    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
15641    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15642
15643    IEM_MC_BEGIN(0, 1);
15644    IEM_MC_LOCAL(PCRTFLOAT80U,              pr80ValueN);
15645
15646    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15647    IEM_MC_MAYBE_RAISE_FPU_XCPT();
15648
15649    IEM_MC_PREPARE_FPU_USAGE();
15650    /* Both ST(i) and ST(0) must be valid; only then is the condition tested. */
15651    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15652        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
15653            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15654        IEM_MC_ENDIF();
15655        IEM_MC_UPDATE_FPU_OPCODE_IP();
15656    IEM_MC_ELSE()
15657        IEM_MC_FPU_STACK_UNDERFLOW(0);
15658    IEM_MC_ENDIF();
15659    IEM_MC_ADVANCE_RIP();
15660
15661    IEM_MC_END();
15662    return VINF_SUCCESS;
15663}
15663
15664
15665/**
15666 * Common worker for FPU instructions working on ST0 and STn, only affecting
15667 * flags, and popping twice when done.
15668 *
15669 * Compares ST(0) against ST(1); only the FSW from the helper is committed,
15670 * then both operands are popped.  Used by FUCOMPP.
15671 *
15672 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
15673 */
15674FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
15675{
15676    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15677
15678    IEM_MC_BEGIN(3, 1);
15679    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
15680    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw, 0);
15681    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,         1);
15682    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value2,         2);
15683
15684    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15685    IEM_MC_MAYBE_RAISE_FPU_XCPT();
15686
15687    IEM_MC_PREPARE_FPU_USAGE();
15688    /* Operands are fixed: ST(0) and ST(1). */
15689    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
15690        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
15691        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
15692    IEM_MC_ELSE()
15693        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
15694    IEM_MC_ENDIF();
15695    IEM_MC_ADVANCE_RIP();
15696
15697    IEM_MC_END();
15698    return VINF_SUCCESS;
15699}
15696
15697
15698/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare ST(0) with ST(1), pop both. */
15699FNIEMOP_DEF(iemOp_fucompp)
15700{
15701    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
15702    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
15703}
15704
15705
15706/**
15707 * Common worker for FPU instructions working on ST0 and an m32i, and storing
15708 * the result in ST0.
15709 *
15710 * Used by the 0xda memory forms: FIADD, FIMUL, FISUB, FISUBR, FIDIV, FIDIVR.
15711 *
15712 * @param   bRm         The ModR/M byte, encoding the memory operand.
15713 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
15714 */
15715FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
15716{
15717    IEM_MC_BEGIN(3, 3);
15718    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
15719    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
15720    IEM_MC_LOCAL(int32_t,               i32Val2);
15721    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
15722    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
15723    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2,     i32Val2,    2);
15724
15725    /* Decode the effective address before finishing the instruction decode. */
15726    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15727    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15728
15729    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15730    IEM_MC_MAYBE_RAISE_FPU_XCPT();
15731    /* The memory operand is fetched before the stack-empty check. */
15732    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15733
15734    IEM_MC_PREPARE_FPU_USAGE();
15735    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15736        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
15737        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15738    IEM_MC_ELSE()
15739        IEM_MC_FPU_STACK_UNDERFLOW(0);
15740    IEM_MC_ENDIF();
15741    IEM_MC_ADVANCE_RIP();
15742
15743    IEM_MC_END();
15744    return VINF_SUCCESS;
15745}
15741
15742
15743/** Opcode 0xda !11/0 - FIADD m32i: ST(0) += (int32 at m32). */
15744FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
15745{
15746    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
15747    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
15748}
15749
15750
15751/** Opcode 0xda !11/1 - FIMUL m32i: ST(0) *= (int32 at m32). */
15752FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
15753{
15754    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
15755    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
15756}
15757
15758
15759/** Opcode 0xda !11/2 - FICOM m32i: compare ST(0) with (int32 at m32), set C0/C2/C3. */
15760FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
15761{
15762    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
15763
15764    IEM_MC_BEGIN(3, 3);
15765    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
15766    IEM_MC_LOCAL(uint16_t,              u16Fsw);
15767    IEM_MC_LOCAL(int32_t,               i32Val2);
15768    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
15769    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
15770    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2,     i32Val2,    2);
15771
15772    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15773    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15774
15775    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15776    IEM_MC_MAYBE_RAISE_FPU_XCPT();
15777    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15778
15779    IEM_MC_PREPARE_FPU_USAGE();
15780    /* Compare-only: commit the FSW (with FDP/FDS from the memory operand),
15781       no register result, no pop. */
15782    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15783        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
15784        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15785    IEM_MC_ELSE()
15786        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15787    IEM_MC_ENDIF();
15788    IEM_MC_ADVANCE_RIP();
15789
15790    IEM_MC_END();
15791    return VINF_SUCCESS;
15792}
15791
15792
15793/** Opcode 0xda !11/3 - FICOMP m32i: like FICOM m32i but pops ST(0) afterwards. */
15794FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
15795{
15796    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
15797
15798    IEM_MC_BEGIN(3, 3);
15799    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
15800    IEM_MC_LOCAL(uint16_t,              u16Fsw);
15801    IEM_MC_LOCAL(int32_t,               i32Val2);
15802    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
15803    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
15804    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2,     i32Val2,    2);
15805
15806    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15807    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15808
15809    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15810    IEM_MC_MAYBE_RAISE_FPU_XCPT();
15811    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15812
15813    IEM_MC_PREPARE_FPU_USAGE();
15814    /* Same compare as FICOM (shares iemAImpl_ficom_r80_by_i32), but the
15815       THEN_POP variants pop ST(0) after committing the status word. */
15816    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15817        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
15818        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15819    IEM_MC_ELSE()
15820        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15821    IEM_MC_ENDIF();
15822    IEM_MC_ADVANCE_RIP();
15823
15824    IEM_MC_END();
15825    return VINF_SUCCESS;
15826}
15825
15826
15827/** Opcode 0xda !11/4 - FISUB m32i: ST(0) -= (int32 at m32). */
15828FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
15829{
15830    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
15831    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
15832}
15833
15834
15835/** Opcode 0xda !11/5 - FISUBR m32i: ST(0) = (int32 at m32) - ST(0). */
15836FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
15837{
15838    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
15839    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
15840}
15841
15842
15843/** Opcode 0xda !11/6 - FIDIV m32i: ST(0) /= (int32 at m32). */
15844FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
15845{
15846    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
15847    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
15848}
15849
15850
15851/** Opcode 0xda !11/7 - FIDIVR m32i: ST(0) = (int32 at m32) / ST(0). */
15852FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
15853{
15854    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
15855    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
15856}
15857
15858
15859/** Opcode 0xda - escape to x87; dispatches on the ModR/M byte. */
15860FNIEMOP_DEF(iemOp_EscF2)
15861{
15862    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15863    /* Record the low 11 opcode bits for the FOP field (0xda -> 2). */
15864    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
15865    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15866    {
15867        /* Register form: FCMOVcc plus the single-encoding FUCOMPP (0xe9). */
15868        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15869        {
15870            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
15871            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
15872            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
15873            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
15874            case 4: return IEMOP_RAISE_INVALID_OPCODE();
15875            case 5:
15876                if (bRm == 0xe9)
15877                    return FNIEMOP_CALL(iemOp_fucompp);
15878                return IEMOP_RAISE_INVALID_OPCODE();
15879            case 6: return IEMOP_RAISE_INVALID_OPCODE();
15880            case 7: return IEMOP_RAISE_INVALID_OPCODE();
15881            IEM_NOT_REACHED_DEFAULT_CASE_RET();
15882        }
15883    }
15884    else
15885    {
15886        /* Memory form: 32-bit integer arithmetic/compare against ST(0). */
15887        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15888        {
15889            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
15890            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
15891            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
15892            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
15893            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
15894            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
15895            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
15896            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
15897            IEM_NOT_REACHED_DEFAULT_CASE_RET();
15898        }
15899    }
15900}
15898
15899
15900/** Opcode 0xdb !11/0 - FILD m32i: convert the int32 at m32 and push it. */
15901FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
15902{
15903    IEMOP_MNEMONIC(fild_m32i, "fild m32i");
15904
15905    IEM_MC_BEGIN(2, 3);
15906    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
15907    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
15908    IEM_MC_LOCAL(int32_t,                   i32Val);
15909    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes, 0);
15910    IEM_MC_ARG_LOCAL_REF(int32_t const *,   pi32Val,    i32Val, 1);
15911
15912    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15913    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15914
15915    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15916    IEM_MC_MAYBE_RAISE_FPU_XCPT();
15917    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15918
15919    IEM_MC_PREPARE_FPU_USAGE();
15920    /* ST(7) is the push destination; occupied means stack overflow. */
15921    IEM_MC_IF_FPUREG_IS_EMPTY(7)
15922        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
15922        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15923    IEM_MC_ELSE()
15924        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15925    IEM_MC_ENDIF();
15926    IEM_MC_ADVANCE_RIP();
15927
15928    IEM_MC_END();
15929    return VINF_SUCCESS;
15930}
15931
15932
15933/** Opcode 0xdb !11/1 - FISTTP m32i: store ST(0) as int32 with truncation, then pop (SSE3). */
15934FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
15935{
15936    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
15937    IEM_MC_BEGIN(3, 2);
15938    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffDst);
15939    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
15940    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw, 0);
15941    IEM_MC_ARG(int32_t *,                   pi32Dst,            1);
15942    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value,          2);
15943
15944    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15945    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15946    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15947    IEM_MC_MAYBE_RAISE_FPU_XCPT();
15948
15949    /* Map the destination writable up front so #PF is raised before any
15950       FPU state is changed. */
15951    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15952    IEM_MC_PREPARE_FPU_USAGE();
15953    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15954        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15955        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15956        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15957    IEM_MC_ELSE()
15958        /* Empty ST(0): with IM masked, store the integer-indefinite value. */
15959        IEM_MC_IF_FCW_IM()
15960            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15961            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15962        IEM_MC_ENDIF();
15963        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15964    IEM_MC_ENDIF();
15965    IEM_MC_ADVANCE_RIP();
15966
15967    IEM_MC_END();
15968    return VINF_SUCCESS;
15969}
15967
15968
15969/** Opcode 0xdb !11/2 - FIST m32i: store ST(0) as int32 (rounded per RC), no pop. */
15970FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
15971{
15972    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
15973    IEM_MC_BEGIN(3, 2);
15974    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffDst);
15975    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
15976    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw, 0);
15977    IEM_MC_ARG(int32_t *,                   pi32Dst,            1);
15978    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value,          2);
15979
15980    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15981    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15982    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15983    IEM_MC_MAYBE_RAISE_FPU_XCPT();
15984
15985    /* Map the destination writable up front so #PF is raised before any
15986       FPU state is changed. */
15987    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15988    IEM_MC_PREPARE_FPU_USAGE();
15989    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15990        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15991        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15992        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15993    IEM_MC_ELSE()
15994        /* Empty ST(0): with IM masked, store the integer-indefinite value. */
15995        IEM_MC_IF_FCW_IM()
15996            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15997            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15998        IEM_MC_ENDIF();
15999        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16000    IEM_MC_ENDIF();
16001    IEM_MC_ADVANCE_RIP();
16002
16003    IEM_MC_END();
16004    return VINF_SUCCESS;
16005}
16003
16004
16005/** Opcode 0xdb !11/3 - FISTP m32i: like FIST m32i but pops ST(0) afterwards. */
16006FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
16007{
16008    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
16009    IEM_MC_BEGIN(3, 2);
16010    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffDst);
16011    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
16012    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw, 0);
16013    IEM_MC_ARG(int32_t *,                   pi32Dst,            1);
16014    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value,          2);
16015
16016    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16017    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16018    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16019    IEM_MC_MAYBE_RAISE_FPU_XCPT();
16020
16021    /* Map the destination writable up front so #PF is raised before any
16022       FPU state is changed. */
16023    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16024    IEM_MC_PREPARE_FPU_USAGE();
16025    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16026        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
16027        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
16028        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16029    IEM_MC_ELSE()
16030        /* Empty ST(0): with IM masked, store the integer-indefinite value. */
16031        IEM_MC_IF_FCW_IM()
16032            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
16033            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
16034        IEM_MC_ENDIF();
16035        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16036    IEM_MC_ENDIF();
16037    IEM_MC_ADVANCE_RIP();
16038
16039    IEM_MC_END();
16040    return VINF_SUCCESS;
16041}
16039
16040
16041/** Opcode 0xdb !11/5 - FLD m80r: push the 80-bit real at m80 onto the stack. */
16042FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
16043{
16044    IEMOP_MNEMONIC(fld_m80r, "fld m80r");
16045
16046    IEM_MC_BEGIN(2, 3);
16047    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
16048    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
16049    IEM_MC_LOCAL(RTFLOAT80U,                r80Val);
16050    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes, 0);
16051    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,      pr80Val,    r80Val, 1);
16052
16053    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16054    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16055
16056    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16057    IEM_MC_MAYBE_RAISE_FPU_XCPT();
16058    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16059
16060    IEM_MC_PREPARE_FPU_USAGE();
16061    /* ST(7) is the push destination; occupied means stack overflow. */
16062    IEM_MC_IF_FPUREG_IS_EMPTY(7)
16063        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
16064        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16065    IEM_MC_ELSE()
16066        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16067    IEM_MC_ENDIF();
16068    IEM_MC_ADVANCE_RIP();
16069
16070    IEM_MC_END();
16071    return VINF_SUCCESS;
16072}
16072
16073
16074/** Opcode 0xdb !11/7 - FSTP m80r: store ST(0) as 80-bit real at m80, then pop. */
16075FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
16076{
16077    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
16078    IEM_MC_BEGIN(3, 2);
16079    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffDst);
16080    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
16081    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw, 0);
16082    IEM_MC_ARG(PRTFLOAT80U,                 pr80Dst,            1);
16083    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value,          2);
16084
16085    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16086    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16087    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16088    IEM_MC_MAYBE_RAISE_FPU_XCPT();
16089
16090    /* Map the destination writable up front so #PF is raised before any
16091       FPU state is changed. */
16092    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16093    IEM_MC_PREPARE_FPU_USAGE();
16094    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16095        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
16096        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
16097        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16098    IEM_MC_ELSE()
16099        /* Empty ST(0): with IM masked, store the real-indefinite QNaN. */
16100        IEM_MC_IF_FCW_IM()
16101            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
16102            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
16103        IEM_MC_ENDIF();
16104        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16105    IEM_MC_ENDIF();
16106    IEM_MC_ADVANCE_RIP();
16107
16108    IEM_MC_END();
16109    return VINF_SUCCESS;
16110}
16108
16109
16110/** Opcode 0xdb 11/0 - FCMOVNB: copy ST(i) to ST(0) if CF=0 (not below). */
16111FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
16112{
16113    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
16114    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16115
16116    IEM_MC_BEGIN(0, 1);
16117    IEM_MC_LOCAL(PCRTFLOAT80U,              pr80ValueN);
16118
16119    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16120    IEM_MC_MAYBE_RAISE_FPU_XCPT();
16121
16122    IEM_MC_PREPARE_FPU_USAGE();
16123    /* Both ST(i) and ST(0) must be valid; only then is the condition tested. */
16124    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16125        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
16126            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16127        IEM_MC_ENDIF();
16128        IEM_MC_UPDATE_FPU_OPCODE_IP();
16129    IEM_MC_ELSE()
16130        IEM_MC_FPU_STACK_UNDERFLOW(0);
16131    IEM_MC_ENDIF();
16132    IEM_MC_ADVANCE_RIP();
16133
16134    IEM_MC_END();
16135    return VINF_SUCCESS;
16136}
16136
16137
16138/** Opcode 0xdb 11/1 - FCMOVNE: copy ST(i) to ST(0) if ZF=0 (not equal). */
16139FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
16140{
16141    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
16142    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16143
16144    IEM_MC_BEGIN(0, 1);
16145    IEM_MC_LOCAL(PCRTFLOAT80U,              pr80ValueN);
16146
16147    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16148    IEM_MC_MAYBE_RAISE_FPU_XCPT();
16149
16150    IEM_MC_PREPARE_FPU_USAGE();
16151    /* Both ST(i) and ST(0) must be valid; only then is the condition tested. */
16152    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16153        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
16154            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16155        IEM_MC_ENDIF();
16156        IEM_MC_UPDATE_FPU_OPCODE_IP();
16157    IEM_MC_ELSE()
16158        IEM_MC_FPU_STACK_UNDERFLOW(0);
16159    IEM_MC_ENDIF();
16160    IEM_MC_ADVANCE_RIP();
16161
16162    IEM_MC_END();
16163    return VINF_SUCCESS;
16164}
16164
16165
/** Opcode 0xdb 11/2.
 * FCMOVNBE ST(0),ST(i): copy ST(i) into ST(0) when both CF and ZF are clear
 * (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be valid; otherwise signal stack underflow. */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16192
16193
/** Opcode 0xdb 11/3.
 * FCMOVNU ST(0),ST(i): copy ST(i) into ST(0) when PF is clear (not unordered).
 * Note: historically named iemOp_fcmovnnu_stN here; the instruction is FCMOVNU. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be valid; otherwise signal stack underflow. */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16220
16221
/** Opcode 0xdb 0xe0.
 * FNENI: 8087-only (enable interrupts); a no-op on later FPUs, so only the
 * device-not-available check and RIP advance are emulated. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16233
16234
/** Opcode 0xdb 0xe1.
 * FNDISI: 8087-only (disable interrupts); ignored on later FPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16246
16247
/** Opcode 0xdb 0xe2.
 * FNCLEX: clear the FPU exception flags in FSW without checking for pending
 * unmasked exceptions first (the no-wait form of FCLEX). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16262
16263
/** Opcode 0xdb 0xe3.
 * FNINIT: reinitialize the FPU; deferred to the C implementation without
 * checking for pending exceptions (fCheckXcpts=false). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
16271
16272
/** Opcode 0xdb 0xe4.
 * FNSETPM: 80287-only; ignored (treated as a no-op) on later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16284
16285
/** Opcode 0xdb 0xe5.
 * FRSTPM: 80287XL-only; we follow newer CPUs and raise \#UD (the ignore-path
 * is kept under \#if 0 for reference). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
16301
16302
/** Opcode 0xdb 11/5.
 * FUCOMI ST(0),ST(i): unordered compare setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
16309
16310
/** Opcode 0xdb 11/6.
 * FCOMI ST(0),ST(i): ordered compare setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
16317
16318
/** Opcode 0xdb.
 * Escape group 3: dispatches on the ModR/M byte.  Register forms (mod=3)
 * are FCMOVcc/FCLEX/FINIT/F(U)COMI; memory forms are 32-bit integer and
 * 80-bit real loads/stores. */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) for FSTENV/FSAVE: low 3 bits of 0xdb + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* /4 encodes individual no-operand instructions, keyed on the full byte. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16368
16369
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte; low 3 bits select ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both operands must be valid; otherwise signal stack underflow on ST(i). */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16401
16402
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0): ST(i) = ST(i) + ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
16409
16410
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0): ST(i) = ST(i) * ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
16417
16418
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0): ST(i) = ST(0) - ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
16425
16426
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0): ST(i) = ST(i) - ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
16433
16434
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0): ST(i) = ST(0) / ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
16441
16442
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0): ST(i) = ST(i) / ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
16449
16450
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm      The ModR/M byte (memory form) used for effective address
 *                   calculation.
 * @param   pfnImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Factor1,                1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U,   pr64Factor2,    r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the memory operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16485
16486
/** Opcode 0xdc !11/0.
 * FADD m64real: ST(0) = ST(0) + m64real. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
16493
16494
/** Opcode 0xdc !11/1.
 * FMUL m64real: ST(0) = ST(0) * m64real. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
16501
16502
/** Opcode 0xdc !11/2.
 * FCOM ST(0),m64real: compare ST(0) with a 64-bit real memory operand and set
 * the FSW condition codes; no pop. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    /* ST(0) must be valid; otherwise signal stack underflow (no store target). */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16535
16536
/** Opcode 0xdc !11/3.
 * FCOMP ST(0),m64real: like FCOM m64real but pops the register stack
 * afterwards (*_THEN_POP variants below). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16569
16570
/** Opcode 0xdc !11/4.
 * FSUB m64real: ST(0) = ST(0) - m64real. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
16577
16578
/** Opcode 0xdc !11/5.
 * FSUBR m64real: ST(0) = m64real - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
16585
16586
/** Opcode 0xdc !11/6.
 * FDIV m64real: ST(0) = ST(0) / m64real. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
16593
16594
/** Opcode 0xdc !11/7.
 * FDIVR m64real: ST(0) = m64real / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
16601
16602
/** Opcode 0xdc.
 * Escape group 4: register forms (mod=3) operate ST(i) <- op(ST(i),ST(0));
 * memory forms use a 64-bit real operand with ST(0) as destination. */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) for FSTENV/FSAVE: low 3 bits of 0xdc + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16639
16640
/** Opcode 0xdd !11/0.
 * FLD m64real: convert a 64-bit real from memory to 80-bit and push it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be free for the push; otherwise signal stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16672
16673
/** Opcode 0xdd !11/1.
 * FISTTP m64int: store ST(0) to memory as a 64-bit integer using truncation
 * (round toward zero), then pop.  On masked invalid operation the integer
 * indefinite value (INT64_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before the FPU work so a #PF is raised up front. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store integer indefinite only if IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16708
16709
/** Opcode 0xdd !11/2.
 * FST m64real: store ST(0) to memory as a 64-bit real; no pop.  On masked
 * invalid operation a negative QNaN is stored instead. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before the FPU work so a #PF is raised up front. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store QNaN only if the invalid-operation exception is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16744
16745
16746
16747
/** Opcode 0xdd !11/3.
 * FSTP m64real: like FST m64real but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before the FPU work so a #PF is raised up front. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store QNaN only if the invalid-operation exception is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16782
16783
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte: restore the complete FPU state from memory (layout
 * depends on operand size); deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16801
16802
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte: save the complete FPU state to memory and reinitialize
 * the FPU; deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
16821
/** Opcode 0xdd !11/7.
 * FNSTSW m16: store the FPU status word to a 16-bit memory operand without
 * checking for pending exceptions first. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
16846
16847
/** Opcode 0xdd 11/0.
 * FFREE ST(i): mark register ST(i) as empty in the FPU tag word. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16869
16870
/** Opcode 0xdd 11/2.
 * FST ST(i): copy ST(0) into ST(i); no pop. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* ST(0) must be valid; otherwise signal stack underflow on ST(i). */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16895
16896
/** Opcode 0xdd 11/4.
 * FUCOM ST(0),ST(i): unordered compare setting FSW condition codes; no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
16903
16904
/** Opcode 0xdd 11/5.
 * FUCOMP ST(0),ST(i): unordered compare setting FSW condition codes, then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
16911
16912
/** Opcode 0xdd.
 * Escape group 5: register forms are FFREE/FST/FSTP/FUCOM(P); memory forms
 * are 64-bit real and integer stores plus FRSTOR/FNSAVE/FNSTSW. */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) for FSTENV/FSAVE: low 3 bits of 0xdd + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16949
16950
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0): ST(i) = ST(i) + ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
16957
16958
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0): ST(i) = ST(i) * ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
16965
16966
/** Opcode 0xde 0xd9.
 * FCOMPP: compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
16973
16974
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0): ST(i) = ST(0) - ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
16981
16982
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0): ST(i) = ST(i) - ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
16989
16990
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0): ST(i) = ST(0) / ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
16997
16998
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0): ST(i) = ST(i) / ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
17005
17006
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form) used for effective
 *                      address calculation.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 16-bit integer operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17042
17043
/** Opcode 0xde !11/0.
 * FIADD m16int: ST(0) = ST(0) + m16int. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
17050
17051
/** Opcode 0xde !11/1.
 * FIMUL m16int: ST(0) = ST(0) * m16int. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
17058
17059
/** Opcode 0xde !11/2.
 * FICOM ST(0),m16int: compare ST(0) with a 16-bit integer memory operand and
 * set the FSW condition codes; no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    /* ST(0) must be valid; otherwise signal stack underflow. */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17092
17093
/** Opcode 0xde !11/3.
 *
 * FICOMP m16i: like ficom m16i (compare ST(0) with signed 16-bit integer),
 * but pops the FPU stack afterwards — hence the _THEN_POP FSW/underflow
 * variants below.
 */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same compare helper as ficom; only the pop-after-update differs. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17126
17127
/** Opcode 0xde !11/4.
 * FISUB m16i: ST(0) -= (signed 16-bit integer at mem operand). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    /* Shared worker fetches the m16i operand and applies the arithmetic helper to ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
17134
17135
/** Opcode 0xde !11/5.
 * FISUBR m16i: ST(0) = (signed 16-bit integer at mem operand) - ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    /* Shared worker fetches the m16i operand and applies the arithmetic helper to ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
17142
17143
/** Opcode 0xde !11/6.
 * FIDIV m16i: ST(0) /= (signed 16-bit integer at mem operand). */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    /* Shared worker fetches the m16i operand and applies the arithmetic helper to ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
17150
17151
/** Opcode 0xde !11/7.
 * FIDIVR m16i: ST(0) = (signed 16-bit integer at mem operand) / ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    /* Shared worker fetches the m16i operand and applies the arithmetic helper to ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
17158
17159
17160/** Opcode 0xde. */
17161FNIEMOP_DEF(iemOp_EscF6)
17162{
17163 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17164 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
17165 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17166 {
17167 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17168 {
17169 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
17170 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
17171 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
17172 case 3: if (bRm == 0xd9)
17173 return FNIEMOP_CALL(iemOp_fcompp);
17174 return IEMOP_RAISE_INVALID_OPCODE();
17175 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
17176 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
17177 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
17178 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
17179 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17180 }
17181 }
17182 else
17183 {
17184 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17185 {
17186 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
17187 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
17188 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
17189 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
17190 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
17191 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
17192 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
17193 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
17194 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17195 }
17196 }
17197}
17198
17199
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp:
 * mark ST(i) empty, then pop the register stack. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* ffree: tag ST(i) as empty; then the pop is just a TOP increment. */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17221
17222
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copy the FPU status word into AX.  No-wait form, so only the
 * device-not-available check is done — no pending FPU exception check. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17239
17240
/** Opcode 0xdf 11/5.
 * FUCOMIP: compare ST(0) with ST(i) into EFLAGS, then pop.
 * NOTE(review): uses the same assembly helper as fcomip
 * (iemAImpl_fcomi_r80_by_r80); fucomi's laxer QNaN handling does not appear
 * to be differentiated here — confirm intended. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
17247
17248
/** Opcode 0xdf 11/6.
 * FCOMIP: compare ST(0) with ST(i) into EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
17255
17256
/** Opcode 0xdf !11/0.
 *
 * FILD m16i: convert the signed 16-bit integer memory operand to an 80-bit
 * real and push it onto the FPU stack.
 */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is the slot that becomes ST(0) after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17288
17289
/** Opcode 0xdf !11/1.
 *
 * FISTTP m16i (SSE3): store ST(0) to memory as a signed 16-bit integer using
 * truncation (chop) regardless of the FCW rounding mode, then pop.  With the
 * invalid-operation exception masked (FCW.IM), an unrepresentable value
 * stores the integer indefinite (INT16_MIN).
 */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so #PF is raised before FPU state changes. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit only if FSW permits (unmasked exception suppresses the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17324
17325
/** Opcode 0xdf !11/2.
 *
 * FIST m16i: store ST(0) to memory as a signed 16-bit integer using the
 * current FCW rounding mode.  No pop (see fistp for that).
 */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so #PF is raised before FPU state changes. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit only if FSW permits (unmasked exception suppresses the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17360
17361
/** Opcode 0xdf !11/3.
 *
 * FISTP m16i: like fist m16i (store ST(0) as signed 16-bit integer, FCW
 * rounding), but pops the FPU stack afterwards.
 */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so #PF is raised before FPU state changes. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit only if FSW permits (unmasked exception suppresses the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17396
17397
/** Opcode 0xdf !11/4.
 * FBLD: load an 80-bit packed-BCD memory operand — not implemented yet.
 * NOTE(review): suffix says m80d but the operand is packed BCD (m80bcd). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
17400
17401
/** Opcode 0xdf !11/5.
 *
 * FILD m64i: convert the signed 64-bit integer memory operand to an 80-bit
 * real and push it onto the FPU stack.
 */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is the slot that becomes ST(0) after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17433
17434
/** Opcode 0xdf !11/6.
 * FBSTP: store ST(0) as 80-bit packed BCD and pop — not implemented yet.
 * NOTE(review): suffix says m80d but the operand is packed BCD (m80bcd). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
17437
17438
/** Opcode 0xdf !11/7.
 *
 * FISTP m64i: store ST(0) to memory as a signed 64-bit integer using the FCW
 * rounding mode, then pop the FPU stack.
 */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so #PF is raised before FPU state changes. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        /* Commit only if FSW permits (unmasked exception suppresses the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17473
17474
17475/** Opcode 0xdf. */
17476FNIEMOP_DEF(iemOp_EscF7)
17477{
17478 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17479 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17480 {
17481 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17482 {
17483 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
17484 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
17485 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
17486 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
17487 case 4: if (bRm == 0xe0)
17488 return FNIEMOP_CALL(iemOp_fnstsw_ax);
17489 return IEMOP_RAISE_INVALID_OPCODE();
17490 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
17491 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
17492 case 7: return IEMOP_RAISE_INVALID_OPCODE();
17493 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17494 }
17495 }
17496 else
17497 {
17498 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17499 {
17500 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
17501 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
17502 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
17503 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
17504 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
17505 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
17506 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
17507 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
17508 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17509 }
17510 }
17511}
17512
17513
/** Opcode 0xe0.
 * LOOPNE Jb: decrement the counter register (CX/ECX/RCX per effective address
 * size) and branch if it is non-zero AND ZF is clear. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects which counter width is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17560
17561
/** Opcode 0xe1.
 * LOOPE Jb: decrement the counter register (CX/ECX/RCX per effective address
 * size) and branch if it is non-zero AND ZF is set. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects which counter width is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17608
17609
/** Opcode 0xe2.
 * LOOP Jb: decrement the counter register (CX/ECX/RCX per effective address
 * size) and branch if it is non-zero.  A LOOP that branches to itself
 * (i8Imm == -instruction-length) is short-circuited by clearing the counter
 * and falling through, instead of iterating in the emulator. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-loop: equivalent to running the counter down to zero. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-loop: equivalent to running the counter down to zero. */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-loop: equivalent to running the counter down to zero. */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17683
17684
/** Opcode 0xe3.
 * JCXZ/JECXZ/JRCXZ Jb: branch if the counter register (selected by the
 * effective address size) is zero.  The counter is only tested, not modified. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* Note the inverted branch sense: non-zero falls through, zero jumps. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17728
17729
/** Opcode 0xe4.
 * IN AL,Ib: read one byte from the immediate 8-bit I/O port into AL. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}
17738
17739
/** Opcode 0xe5.
 * IN eAX,Ib: read 2 or 4 bytes (per operand size) from the immediate 8-bit
 * I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17748
17749
/** Opcode 0xe6.
 * OUT Ib,AL: write AL to the immediate 8-bit I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
17758
17759
/** Opcode 0xe7.
 * OUT Ib,eAX: write AX/EAX (per operand size) to the immediate 8-bit I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17768
17769
/** Opcode 0xe8.
 * CALL Jv: near relative call.  The immediate width follows the operand size;
 * in 64-bit mode the rel32 immediate is sign-extended to 64 bits. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            /* Cast reinterprets the immediate as a signed displacement. */
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* rel32 sign-extended to 64 bits; there is no rel64 form. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17798
17799
/** Opcode 0xe9.
 * JMP Jv: near relative jump.  64-bit mode shares the rel32 path since there
 * is no rel64 encoding. */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17829
17830
/** Opcode 0xea.
 * JMP Ap: direct far jump with an immediate selector:offset pointer.
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT). */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
17847
17848
/** Opcode 0xeb.
 * JMP Jb: unconditional short (rel8) jump. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
17862
17863
/** Opcode 0xec.
 * IN AL,DX: read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
17871
17872
/** Opcode 0xed.
 * IN eAX,DX: read 2 or 4 bytes (per operand size) from the I/O port in DX.
 * NOTE(review): function name lacks the in_ prefix its siblings use (cf.
 * iemOp_in_AL_DX); renaming would require touching the opcode table. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17880
17881
/** Opcode 0xee.
 * OUT DX,AL: write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
17889
17890
/** Opcode 0xef.
 * OUT DX,eAX: write AX/EAX (per operand size) to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17898
17899
/** Opcode 0xf0.
 * LOCK prefix: record the prefix flag, then decode and dispatch the next
 * opcode byte (prefix handlers recurse into the one-byte map). */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
17909
17910
/** Opcode 0xf1.
 * INT1/ICEBP: raises #DB; fIsBpInstr is false so it is not treated as INT3. */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
17919
17920
/** Opcode 0xf2.
 * REPNE/REPNZ prefix: record it and dispatch the next opcode byte. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
17936
17937
/** Opcode 0xf3.
 * REPE/REPZ prefix: record it and dispatch the next opcode byte. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
17953
17954
/** Opcode 0xf4.
 * HLT: halt the CPU until the next interrupt; privilege check is done in the
 * C implementation. */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
17961
17962
/** Opcode 0xf5.
 * CMC: complement (toggle) the carry flag; no other flags touched. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17974
17975
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Dispatches on the mod field: register operands use the non-locked helper
 * directly; memory operands are mapped read-write and use the locked helper
 * when a LOCK prefix is present.
 *
 * @param bRm The RM byte.
 * @param pImpl The instruction implementation (normal + locked variants per size).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK prefix selects the atomic helper variant. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
18019
18020
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are delegated to iemOpCommonUnaryGReg; memory operands
 * are handled here per effective operand size, mapped read-write, with the
 * locked helper variant when a LOCK prefix is present.
 *
 * @param bRm The RM byte.
 * @param pImpl The instruction implementation (normal + locked variants per size).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            /* LOCK prefix selects the atomic helper variant. */
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
18099
18100
/** Opcode 0xf6 /0 - test Eb,Ib (byte destination, byte immediate).
 *  AF is left undefined by TEST, hence the verification exemption. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Reserve 1 byte for the immediate that still follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* TEST does not write back the destination, so a read-only mapping suffices. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
18147
18148
18149/** Opcode 0xf7 /0. */
18150FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
18151{
18152 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
18153 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
18154
18155 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18156 {
18157 /* register access */
18158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18159 switch (pVCpu->iem.s.enmEffOpSize)
18160 {
18161 case IEMMODE_16BIT:
18162 {
18163 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
18164 IEM_MC_BEGIN(3, 0);
18165 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
18166 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
18167 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18168 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18169 IEM_MC_REF_EFLAGS(pEFlags);
18170 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
18171 IEM_MC_ADVANCE_RIP();
18172 IEM_MC_END();
18173 return VINF_SUCCESS;
18174 }
18175
18176 case IEMMODE_32BIT:
18177 {
18178 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
18179 IEM_MC_BEGIN(3, 0);
18180 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18181 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
18182 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18183 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18184 IEM_MC_REF_EFLAGS(pEFlags);
18185 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
18186 /* No clearing the high dword here - test doesn't write back the result. */
18187 IEM_MC_ADVANCE_RIP();
18188 IEM_MC_END();
18189 return VINF_SUCCESS;
18190 }
18191
18192 case IEMMODE_64BIT:
18193 {
18194 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
18195 IEM_MC_BEGIN(3, 0);
18196 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18197 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
18198 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18199 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18200 IEM_MC_REF_EFLAGS(pEFlags);
18201 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
18202 IEM_MC_ADVANCE_RIP();
18203 IEM_MC_END();
18204 return VINF_SUCCESS;
18205 }
18206
18207 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18208 }
18209 }
18210 else
18211 {
18212 /* memory access. */
18213 switch (pVCpu->iem.s.enmEffOpSize)
18214 {
18215 case IEMMODE_16BIT:
18216 {
18217 IEM_MC_BEGIN(3, 2);
18218 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
18219 IEM_MC_ARG(uint16_t, u16Src, 1);
18220 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18222
18223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
18224 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
18225 IEM_MC_ASSIGN(u16Src, u16Imm);
18226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18227 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18228 IEM_MC_FETCH_EFLAGS(EFlags);
18229 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
18230
18231 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
18232 IEM_MC_COMMIT_EFLAGS(EFlags);
18233 IEM_MC_ADVANCE_RIP();
18234 IEM_MC_END();
18235 return VINF_SUCCESS;
18236 }
18237
18238 case IEMMODE_32BIT:
18239 {
18240 IEM_MC_BEGIN(3, 2);
18241 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18242 IEM_MC_ARG(uint32_t, u32Src, 1);
18243 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18245
18246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18247 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
18248 IEM_MC_ASSIGN(u32Src, u32Imm);
18249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18250 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18251 IEM_MC_FETCH_EFLAGS(EFlags);
18252 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
18253
18254 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
18255 IEM_MC_COMMIT_EFLAGS(EFlags);
18256 IEM_MC_ADVANCE_RIP();
18257 IEM_MC_END();
18258 return VINF_SUCCESS;
18259 }
18260
18261 case IEMMODE_64BIT:
18262 {
18263 IEM_MC_BEGIN(3, 2);
18264 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18265 IEM_MC_ARG(uint64_t, u64Src, 1);
18266 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18268
18269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18270 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
18271 IEM_MC_ASSIGN(u64Src, u64Imm);
18272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18273 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18274 IEM_MC_FETCH_EFLAGS(EFlags);
18275 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
18276
18277 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
18278 IEM_MC_COMMIT_EFLAGS(EFlags);
18279 IEM_MC_ADVANCE_RIP();
18280 IEM_MC_END();
18281 return VINF_SUCCESS;
18282 }
18283
18284 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18285 }
18286 }
18287}
18288
18289
/** Opcode 0xf6 /4, /5, /6 and /7 - byte mul/imul/div/idiv worker.
 *
 *  The byte forms operate implicitly on AX (AL for input, AX for output),
 *  so only a single accumulator reference is needed.  The assembly helper
 *  returns non-zero on \#DE conditions (divide by zero / quotient overflow),
 *  which is translated into a divide-error exception here.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 signals a divide error (#DE) from the assembly helper. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
18341
18342
18343/** Opcode 0xf7 /4, /5, /6 and /7. */
18344FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
18345{
18346 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18347
18348 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18349 {
18350 /* register access */
18351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18352 switch (pVCpu->iem.s.enmEffOpSize)
18353 {
18354 case IEMMODE_16BIT:
18355 {
18356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18357 IEM_MC_BEGIN(4, 1);
18358 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18359 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18360 IEM_MC_ARG(uint16_t, u16Value, 2);
18361 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18362 IEM_MC_LOCAL(int32_t, rc);
18363
18364 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18365 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18366 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18367 IEM_MC_REF_EFLAGS(pEFlags);
18368 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18369 IEM_MC_IF_LOCAL_IS_Z(rc) {
18370 IEM_MC_ADVANCE_RIP();
18371 } IEM_MC_ELSE() {
18372 IEM_MC_RAISE_DIVIDE_ERROR();
18373 } IEM_MC_ENDIF();
18374
18375 IEM_MC_END();
18376 return VINF_SUCCESS;
18377 }
18378
18379 case IEMMODE_32BIT:
18380 {
18381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18382 IEM_MC_BEGIN(4, 1);
18383 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18384 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18385 IEM_MC_ARG(uint32_t, u32Value, 2);
18386 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18387 IEM_MC_LOCAL(int32_t, rc);
18388
18389 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18390 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18391 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18392 IEM_MC_REF_EFLAGS(pEFlags);
18393 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18394 IEM_MC_IF_LOCAL_IS_Z(rc) {
18395 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18396 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18397 IEM_MC_ADVANCE_RIP();
18398 } IEM_MC_ELSE() {
18399 IEM_MC_RAISE_DIVIDE_ERROR();
18400 } IEM_MC_ENDIF();
18401
18402 IEM_MC_END();
18403 return VINF_SUCCESS;
18404 }
18405
18406 case IEMMODE_64BIT:
18407 {
18408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18409 IEM_MC_BEGIN(4, 1);
18410 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18411 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18412 IEM_MC_ARG(uint64_t, u64Value, 2);
18413 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18414 IEM_MC_LOCAL(int32_t, rc);
18415
18416 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18417 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18418 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18419 IEM_MC_REF_EFLAGS(pEFlags);
18420 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18421 IEM_MC_IF_LOCAL_IS_Z(rc) {
18422 IEM_MC_ADVANCE_RIP();
18423 } IEM_MC_ELSE() {
18424 IEM_MC_RAISE_DIVIDE_ERROR();
18425 } IEM_MC_ENDIF();
18426
18427 IEM_MC_END();
18428 return VINF_SUCCESS;
18429 }
18430
18431 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18432 }
18433 }
18434 else
18435 {
18436 /* memory access. */
18437 switch (pVCpu->iem.s.enmEffOpSize)
18438 {
18439 case IEMMODE_16BIT:
18440 {
18441 IEM_MC_BEGIN(4, 2);
18442 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18443 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18444 IEM_MC_ARG(uint16_t, u16Value, 2);
18445 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18447 IEM_MC_LOCAL(int32_t, rc);
18448
18449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18451 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18452 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18453 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18454 IEM_MC_REF_EFLAGS(pEFlags);
18455 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18456 IEM_MC_IF_LOCAL_IS_Z(rc) {
18457 IEM_MC_ADVANCE_RIP();
18458 } IEM_MC_ELSE() {
18459 IEM_MC_RAISE_DIVIDE_ERROR();
18460 } IEM_MC_ENDIF();
18461
18462 IEM_MC_END();
18463 return VINF_SUCCESS;
18464 }
18465
18466 case IEMMODE_32BIT:
18467 {
18468 IEM_MC_BEGIN(4, 2);
18469 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18470 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18471 IEM_MC_ARG(uint32_t, u32Value, 2);
18472 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18473 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18474 IEM_MC_LOCAL(int32_t, rc);
18475
18476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18478 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18479 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18480 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18481 IEM_MC_REF_EFLAGS(pEFlags);
18482 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18483 IEM_MC_IF_LOCAL_IS_Z(rc) {
18484 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18485 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18486 IEM_MC_ADVANCE_RIP();
18487 } IEM_MC_ELSE() {
18488 IEM_MC_RAISE_DIVIDE_ERROR();
18489 } IEM_MC_ENDIF();
18490
18491 IEM_MC_END();
18492 return VINF_SUCCESS;
18493 }
18494
18495 case IEMMODE_64BIT:
18496 {
18497 IEM_MC_BEGIN(4, 2);
18498 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18499 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18500 IEM_MC_ARG(uint64_t, u64Value, 2);
18501 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18503 IEM_MC_LOCAL(int32_t, rc);
18504
18505 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18507 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18508 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18509 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18510 IEM_MC_REF_EFLAGS(pEFlags);
18511 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18512 IEM_MC_IF_LOCAL_IS_Z(rc) {
18513 IEM_MC_ADVANCE_RIP();
18514 } IEM_MC_ELSE() {
18515 IEM_MC_RAISE_DIVIDE_ERROR();
18516 } IEM_MC_ENDIF();
18517
18518 IEM_MC_END();
18519 return VINF_SUCCESS;
18520 }
18521
18522 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18523 }
18524 }
18525}
18526
/** Opcode 0xf6 - Group 3 byte forms, dispatched on the ModR/M reg field. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
18563
18564
/** Opcode 0xf7 - Group 3 word/dword/qword forms, dispatched on the ModR/M
 *  reg field. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
18601
18602
/** Opcode 0xf8 - clc (clear the carry flag). */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18614
18615
/** Opcode 0xf9 - stc (set the carry flag). */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18627
18628
/** Opcode 0xfa - cli.  Deferred to a C implementation since it needs
 *  privilege/IOPL checks and may raise \#GP. */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
18636
18637
/** Opcode 0xfb - sti.  Deferred to a C implementation since it needs
 *  privilege/IOPL checks and interrupt shadowing. */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
18644
18645
/** Opcode 0xfc - cld (clear the direction flag). */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18657
18658
/** Opcode 0xfd - std (set the direction flag). */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18670
18671
/** Opcode 0xfe - Group 4: only /0 (inc Eb) and /1 (dec Eb) are defined;
 *  /2 through /7 raise \#UD. */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
18689
18690
18691/**
18692 * Opcode 0xff /2.
18693 * @param bRm The RM byte.
18694 */
18695FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
18696{
18697 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
18698 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18699
18700 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18701 {
18702 /* The new RIP is taken from a register. */
18703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18704 switch (pVCpu->iem.s.enmEffOpSize)
18705 {
18706 case IEMMODE_16BIT:
18707 IEM_MC_BEGIN(1, 0);
18708 IEM_MC_ARG(uint16_t, u16Target, 0);
18709 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18710 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
18711 IEM_MC_END()
18712 return VINF_SUCCESS;
18713
18714 case IEMMODE_32BIT:
18715 IEM_MC_BEGIN(1, 0);
18716 IEM_MC_ARG(uint32_t, u32Target, 0);
18717 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18718 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
18719 IEM_MC_END()
18720 return VINF_SUCCESS;
18721
18722 case IEMMODE_64BIT:
18723 IEM_MC_BEGIN(1, 0);
18724 IEM_MC_ARG(uint64_t, u64Target, 0);
18725 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18726 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
18727 IEM_MC_END()
18728 return VINF_SUCCESS;
18729
18730 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18731 }
18732 }
18733 else
18734 {
18735 /* The new RIP is taken from a register. */
18736 switch (pVCpu->iem.s.enmEffOpSize)
18737 {
18738 case IEMMODE_16BIT:
18739 IEM_MC_BEGIN(1, 1);
18740 IEM_MC_ARG(uint16_t, u16Target, 0);
18741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18744 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18745 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
18746 IEM_MC_END()
18747 return VINF_SUCCESS;
18748
18749 case IEMMODE_32BIT:
18750 IEM_MC_BEGIN(1, 1);
18751 IEM_MC_ARG(uint32_t, u32Target, 0);
18752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18755 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18756 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
18757 IEM_MC_END()
18758 return VINF_SUCCESS;
18759
18760 case IEMMODE_64BIT:
18761 IEM_MC_BEGIN(1, 1);
18762 IEM_MC_ARG(uint64_t, u64Target, 0);
18763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18766 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18767 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
18768 IEM_MC_END()
18769 return VINF_SUCCESS;
18770
18771 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18772 }
18773 }
18774}
18775
18776typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
18777
18778FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
18779{
18780 /* Registers? How?? */
18781 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
18782 { /* likely */ }
18783 else
18784 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
18785
18786 /* Far pointer loaded from memory. */
18787 switch (pVCpu->iem.s.enmEffOpSize)
18788 {
18789 case IEMMODE_16BIT:
18790 IEM_MC_BEGIN(3, 1);
18791 IEM_MC_ARG(uint16_t, u16Sel, 0);
18792 IEM_MC_ARG(uint16_t, offSeg, 1);
18793 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
18794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18797 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18798 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
18799 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18800 IEM_MC_END();
18801 return VINF_SUCCESS;
18802
18803 case IEMMODE_64BIT:
18804 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
18805 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
18806 * and call far qword [rsp] encodings. */
18807 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
18808 {
18809 IEM_MC_BEGIN(3, 1);
18810 IEM_MC_ARG(uint16_t, u16Sel, 0);
18811 IEM_MC_ARG(uint64_t, offSeg, 1);
18812 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
18813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18816 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18817 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
18818 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18819 IEM_MC_END();
18820 return VINF_SUCCESS;
18821 }
18822 /* AMD falls thru. */
18823 /* fall thru */
18824
18825 case IEMMODE_32BIT:
18826 IEM_MC_BEGIN(3, 1);
18827 IEM_MC_ARG(uint16_t, u16Sel, 0);
18828 IEM_MC_ARG(uint32_t, offSeg, 1);
18829 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
18830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18833 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18834 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
18835 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18836 IEM_MC_END();
18837 return VINF_SUCCESS;
18838
18839 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18840 }
18841}
18842
18843
18844/**
18845 * Opcode 0xff /3.
18846 * @param bRm The RM byte.
18847 */
18848FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
18849{
18850 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
18851 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
18852}
18853
18854
18855/**
18856 * Opcode 0xff /4.
18857 * @param bRm The RM byte.
18858 */
18859FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
18860{
18861 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
18862 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18863
18864 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18865 {
18866 /* The new RIP is taken from a register. */
18867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18868 switch (pVCpu->iem.s.enmEffOpSize)
18869 {
18870 case IEMMODE_16BIT:
18871 IEM_MC_BEGIN(0, 1);
18872 IEM_MC_LOCAL(uint16_t, u16Target);
18873 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18874 IEM_MC_SET_RIP_U16(u16Target);
18875 IEM_MC_END()
18876 return VINF_SUCCESS;
18877
18878 case IEMMODE_32BIT:
18879 IEM_MC_BEGIN(0, 1);
18880 IEM_MC_LOCAL(uint32_t, u32Target);
18881 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18882 IEM_MC_SET_RIP_U32(u32Target);
18883 IEM_MC_END()
18884 return VINF_SUCCESS;
18885
18886 case IEMMODE_64BIT:
18887 IEM_MC_BEGIN(0, 1);
18888 IEM_MC_LOCAL(uint64_t, u64Target);
18889 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18890 IEM_MC_SET_RIP_U64(u64Target);
18891 IEM_MC_END()
18892 return VINF_SUCCESS;
18893
18894 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18895 }
18896 }
18897 else
18898 {
18899 /* The new RIP is taken from a memory location. */
18900 switch (pVCpu->iem.s.enmEffOpSize)
18901 {
18902 case IEMMODE_16BIT:
18903 IEM_MC_BEGIN(0, 2);
18904 IEM_MC_LOCAL(uint16_t, u16Target);
18905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18908 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18909 IEM_MC_SET_RIP_U16(u16Target);
18910 IEM_MC_END()
18911 return VINF_SUCCESS;
18912
18913 case IEMMODE_32BIT:
18914 IEM_MC_BEGIN(0, 2);
18915 IEM_MC_LOCAL(uint32_t, u32Target);
18916 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18917 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18919 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18920 IEM_MC_SET_RIP_U32(u32Target);
18921 IEM_MC_END()
18922 return VINF_SUCCESS;
18923
18924 case IEMMODE_64BIT:
18925 IEM_MC_BEGIN(0, 2);
18926 IEM_MC_LOCAL(uint64_t, u64Target);
18927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18928 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18930 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18931 IEM_MC_SET_RIP_U64(u64Target);
18932 IEM_MC_END()
18933 return VINF_SUCCESS;
18934
18935 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18936 }
18937 }
18938}
18939
18940
18941/**
18942 * Opcode 0xff /5.
18943 * @param bRm The RM byte.
18944 */
18945FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
18946{
18947 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
18948 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
18949}
18950
18951
18952/**
18953 * Opcode 0xff /6.
18954 * @param bRm The RM byte.
18955 */
18956FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
18957{
18958 IEMOP_MNEMONIC(push_Ev, "push Ev");
18959
18960 /* Registers are handled by a common worker. */
18961 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18962 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18963
18964 /* Memory we do here. */
18965 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18966 switch (pVCpu->iem.s.enmEffOpSize)
18967 {
18968 case IEMMODE_16BIT:
18969 IEM_MC_BEGIN(0, 2);
18970 IEM_MC_LOCAL(uint16_t, u16Src);
18971 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18972 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18974 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18975 IEM_MC_PUSH_U16(u16Src);
18976 IEM_MC_ADVANCE_RIP();
18977 IEM_MC_END();
18978 return VINF_SUCCESS;
18979
18980 case IEMMODE_32BIT:
18981 IEM_MC_BEGIN(0, 2);
18982 IEM_MC_LOCAL(uint32_t, u32Src);
18983 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18984 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18986 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18987 IEM_MC_PUSH_U32(u32Src);
18988 IEM_MC_ADVANCE_RIP();
18989 IEM_MC_END();
18990 return VINF_SUCCESS;
18991
18992 case IEMMODE_64BIT:
18993 IEM_MC_BEGIN(0, 2);
18994 IEM_MC_LOCAL(uint64_t, u64Src);
18995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18996 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18998 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18999 IEM_MC_PUSH_U64(u64Src);
19000 IEM_MC_ADVANCE_RIP();
19001 IEM_MC_END();
19002 return VINF_SUCCESS;
19003
19004 IEM_NOT_REACHED_DEFAULT_CASE_RET();
19005 }
19006}
19007
19008
/** Opcode 0xff - Group 5, dispatched on the ModR/M reg field
 *  (inc/dec/call/callf/jmp/jmpf/push; /7 is undefined). */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}
19037
19038
19039
/**
 * The one byte opcode decoder function table, indexed directly by the
 * primary opcode byte (0x00 thru 0xff).
 *
 * Group opcodes (e.g. 0x80-0x83 -> Grp1, 0xc0/0xc1 -> Grp2, 0xf6/0xf7 ->
 * Grp3, 0xfe -> Grp4, 0xff -> Grp5) dispatch further on the ModR/M reg
 * field inside their handler, and 0x0f escapes to the two byte opcode map
 * via iemOp_2byteEscape.  Forward declared at the top of this file so the
 * decoder loop can reference it.
 *
 * NOTE(review): entry order must match the opcode values exactly - do not
 * reorder or insert entries.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
19107
19108
19109/** @} */
19110
19111#ifdef _MSC_VER
19112# pragma warning(pop)
19113#endif
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette