VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 65751

Last change on this file since 65751 was 65751, checked in by vboxsync, 8 years ago

IEM: 0x0f 0x6f split up.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 665.3 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 65751 2017-02-13 08:25:42Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.215389.xyz. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24#ifdef _MSC_VER
25# pragma warning(push)
26# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
27#endif
28
29
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself: the reg field selects the source GPR,
 * the r/m field the destination (register or memory).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: the LOCK prefix is never valid here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* A NULL locked variant identifies read-only destinations (CMP, TEST),
           for which the LOCK prefix is likewise invalid. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        if (!pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Dispatch to the locked worker when a LOCK prefix is present. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
92
93
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself and switches on the effective operand size;
 * the reg field selects the source GPR, the r/m field the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: the LOCK prefix is never valid here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* TEST does not write its destination, so only clear the upper
                   dword (64-bit mode semantics of 32-bit ops) for the others. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* NOTE(review): pfnLockedU8 is used as the CMP/TEST indicator for all
           operand sizes -- assumes the locked variants are all present or all
           absent in PCIEMOPBINSIZES; confirm against the table definitions. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
246
247
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * The reg field selects the destination GPR, the r/m field the source
 * (register or memory).  The memory form only reads, so no locked variant
 * is needed here.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
300
301
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * The reg field selects the destination GPR, the r/m field the source
 * (register or memory).  The memory form only reads, so no locked variant
 * is needed here.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit ops zero the upper dword in 64-bit mode.
                   NOTE(review): no g_iemAImpl_test exception here, unlike the
                   rm_rv worker -- presumably TEST never routes through this
                   Gv,Ev form; confirm against the opcode tables. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
432
433
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Fetches the imm8 itself; destination is fixed to AL.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
458
459
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * Fetches the immediate itself (imm16/imm32; in 64-bit mode the imm32 is
 * sign-extended to 64 bits, per the IEM_OPCODE_GET_NEXT_S32_SX_U64 macro).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST does not write its destination, so only clear the upper
               dword (64-bit mode semantics of 32-bit ops) for the others. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
532
533
/** Opcodes 0xf1, 0xd6 -- invalid on all vendors; raises \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC(Invalid, "Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode variant for table slots that receive a pre-fetched ModR/M
 *  byte; the byte is ignored and \#UD is raised. */
FNIEMOPRM_DEF(iemOp_InvalidWithRM)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
    return IEMOP_RAISE_INVALID_OPCODE();
}
549
550
/** Invalid opcode where intel requires Mod R/M sequence.
 *
 * On Intel the ModR/M byte (and any SIB/displacement) is consumed before
 * \#UD is raised; on other vendors the instruction faults immediately. */
FNIEMOP_DEF(iemOp_InvalidNeedRM)
{
    IEMOP_MNEMONIC(InvalidNeedRM, "InvalidNeedRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        /* Memory forms: decode the effective address so the remaining
           SIB/displacement bytes are consumed as well. */
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
571
572
573/** Invalid opcode where intel requires Mod R/M sequence and 8-byte
574 * immediate. */
575FNIEMOP_DEF(iemOp_InvalidNeedRMImm8)
576{
577 IEMOP_MNEMONIC(InvalidNeedRMImm8, "InvalidNeedRMImm8");
578 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
579 {
580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
581#ifndef TST_IEM_CHECK_MC
582 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
583 {
584 RTGCPTR GCPtrEff;
585 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
586 if (rcStrict != VINF_SUCCESS)
587 return rcStrict;
588 }
589#endif
590 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
591 IEMOP_HLP_DONE_DECODING();
592 }
593 return IEMOP_RAISE_INVALID_OPCODE();
594}
595
596
/** Invalid opcode where intel requires a 3rd escape byte and a Mod R/M
 * sequence.
 *
 * On Intel the third escape byte and the full ModR/M sequence are consumed
 * before \#UD is raised; other vendors fault immediately. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRM)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRM, "InvalidNeed3ByteEscRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm;  IEM_OPCODE_GET_NEXT_U8(&bRm);  RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        /* Memory forms: consume any SIB/displacement bytes too. */
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
619
620
/** Invalid opcode where intel requires a 3rd escape byte, Mod R/M sequence,
 * and an 8-bit immediate.
 *
 * On Intel all of these bytes are consumed before \#UD is raised; other
 * vendors fault immediately. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRMImm8, "InvalidNeed3ByteEscRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm;  IEM_OPCODE_GET_NEXT_U8(&bRm);  RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            /* cbImm=1: the trailing imm8 affects RIP-relative addressing. */
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 1, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
644
645
646
647/** @name ..... opcodes.
648 *
649 * @{
650 */
651
652/** @} */
653
654
655/** @name Two byte opcodes (first byte 0x0f).
656 *
657 * @{
658 */
659
/** Opcode 0x0f 0x00 /0 -- sldt: store the LDTR selector.
 *
 * Register form stores at the effective operand size; memory form always
 * stores a 16-bit selector. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
716
717
/** Opcode 0x0f 0x00 /1 -- str: store the task register selector.
 *
 * Register form stores at the effective operand size; memory form always
 * stores a 16-bit selector. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
774
775
/** Opcode 0x0f 0x00 /2 -- lldt: load the LDTR from a 16-bit selector.
 *
 * The heavy lifting (privilege checks, descriptor load) is deferred to
 * iemCImpl_lldt. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        /* CPL check is done before the memory read here. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
806
807
/** Opcode 0x0f 0x00 /3 -- ltr: load the task register from a 16-bit selector.
 *
 * The heavy lifting (privilege checks, descriptor load/busy marking) is
 * deferred to iemCImpl_ltr. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* CPL check is done before the memory read here. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
838
839
840/** Opcode 0x0f 0x00 /3. */
841FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
842{
843 IEMOP_HLP_MIN_286();
844 IEMOP_HLP_NO_REAL_OR_V86_MODE();
845
846 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
847 {
848 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
849 IEM_MC_BEGIN(2, 0);
850 IEM_MC_ARG(uint16_t, u16Sel, 0);
851 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
852 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
853 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
854 IEM_MC_END();
855 }
856 else
857 {
858 IEM_MC_BEGIN(2, 1);
859 IEM_MC_ARG(uint16_t, u16Sel, 0);
860 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
863 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
864 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
865 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
866 IEM_MC_END();
867 }
868 return VINF_SUCCESS;
869}
870
871
/** Opcode 0x0f 0x00 /4 -- verr: verify segment for reading. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5 -- verw: verify segment for writing. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}
888
889
/**
 * Group 6 jump table, indexed by the ModR/M reg field (/0../7).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,        /* /0 */
    iemOp_Grp6_str,         /* /1 */
    iemOp_Grp6_lldt,        /* /2 */
    iemOp_Grp6_ltr,         /* /3 */
    iemOp_Grp6_verr,        /* /4 */
    iemOp_Grp6_verw,        /* /5 */
    iemOp_InvalidWithRM,    /* /6 */
    iemOp_InvalidWithRM     /* /7 */
};
904
/** Opcode 0x0f 0x00 -- dispatches on the ModR/M reg field via g_apfnGroup6. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
911
912
/** Opcode 0x0f 0x01 /0 (memory form) -- sgdt: store the GDTR.
 *
 * Only reached for memory operands; the mod==3 encodings of /0 are dispatched
 * to vmcall/vmlaunch/vmresume/vmxoff. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
929
930
/** Opcode 0x0f 0x01 /0, mod==3 (vmcall) -- unimplemented stub, raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0, mod==3 (vmlaunch) -- unimplemented stub, raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0, mod==3 (vmresume) -- unimplemented stub, raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0, mod==3 (vmxoff) -- unimplemented stub, raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
961
962
/** Opcode 0x0f 0x01 /1 (memory form) -- sidt: store the IDTR.
 *
 * Only reached for memory operands; the mod==3 encodings of /1 are dispatched
 * to monitor/mwait. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
979
980
/** Opcode 0x0f 0x01 /1, mod==3 (monitor) -- deferred to iemCImpl_monitor with
 *  the effective segment for the monitored address. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1, mod==3 (mwait) -- deferred to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
997
998
/** Opcode 0x0f 0x01 /2 (memory form) -- lgdt: load the GDTR.
 *
 * The operand size is passed along so iemCImpl_lgdt can apply the
 * 16/32/64-bit base-width rules. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1015
1016
/** Opcode 0x0f 0x01 0xd0 (XGETBV) - \#UD unless the guest CPU reports XSAVE support. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1028
1029
/** Opcode 0x0f 0x01 0xd1 (XSETBV) - \#UD unless the guest CPU reports XSAVE support. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1041
1042
/** Opcode 0x0f 0x01 /3 (memory form): LIDT - load the IDTR from memory. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    /* In long mode the operand size is forced to 64-bit (no 0x66 override). */
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1061
1062
/* AMD SVM instructions (group 7, /3 register forms 0xd8..0xdf): all unimplemented, raise #UD. */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
1086
1087/** Opcode 0x0f 0x01 /4. */
1088FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1089{
1090 IEMOP_MNEMONIC(smsw, "smsw");
1091 IEMOP_HLP_MIN_286();
1092 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1093 {
1094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1095 switch (pVCpu->iem.s.enmEffOpSize)
1096 {
1097 case IEMMODE_16BIT:
1098 IEM_MC_BEGIN(0, 1);
1099 IEM_MC_LOCAL(uint16_t, u16Tmp);
1100 IEM_MC_FETCH_CR0_U16(u16Tmp);
1101 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1102 { /* likely */ }
1103 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
1104 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1105 else
1106 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1107 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
1108 IEM_MC_ADVANCE_RIP();
1109 IEM_MC_END();
1110 return VINF_SUCCESS;
1111
1112 case IEMMODE_32BIT:
1113 IEM_MC_BEGIN(0, 1);
1114 IEM_MC_LOCAL(uint32_t, u32Tmp);
1115 IEM_MC_FETCH_CR0_U32(u32Tmp);
1116 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
1117 IEM_MC_ADVANCE_RIP();
1118 IEM_MC_END();
1119 return VINF_SUCCESS;
1120
1121 case IEMMODE_64BIT:
1122 IEM_MC_BEGIN(0, 1);
1123 IEM_MC_LOCAL(uint64_t, u64Tmp);
1124 IEM_MC_FETCH_CR0_U64(u64Tmp);
1125 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
1126 IEM_MC_ADVANCE_RIP();
1127 IEM_MC_END();
1128 return VINF_SUCCESS;
1129
1130 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1131 }
1132 }
1133 else
1134 {
1135 /* Ignore operand size here, memory refs are always 16-bit. */
1136 IEM_MC_BEGIN(0, 2);
1137 IEM_MC_LOCAL(uint16_t, u16Tmp);
1138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1139 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1141 IEM_MC_FETCH_CR0_U16(u16Tmp);
1142 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1143 { /* likely */ }
1144 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
1145 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1146 else
1147 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1148 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
1149 IEM_MC_ADVANCE_RIP();
1150 IEM_MC_END();
1151 return VINF_SUCCESS;
1152 }
1153}
1154
1155
/** Opcode 0x0f 0x01 /6: LMSW - load the machine status word into CR0. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source - calc effective address (fetches displacement) before decode-done. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1185
1186
1187/** Opcode 0x0f 0x01 /7. */
1188FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1189{
1190 IEMOP_MNEMONIC(invlpg, "invlpg");
1191 IEMOP_HLP_MIN_486();
1192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1193 IEM_MC_BEGIN(1, 1);
1194 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1195 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1196 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
1197 IEM_MC_END();
1198 return VINF_SUCCESS;
1199}
1200
1201
/** Opcode 0x0f 0x01 /7, register form 0xf8: SWAPGS - 64-bit mode only. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1210
1211
/** Opcode 0x0f 0x01 /7, register form 0xf9: RDTSCP - unimplemented stub. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1219
1220
/** Opcode 0x0f 0x01 (group 7): dispatch on the ModR/M reg field; the memory
 *  forms decode the descriptor-table instructions, the register (mod==3)
 *  forms select special instructions via the r/m field. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* sgdt (mem) / VMX instructions (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* sidt (mem) / monitor+mwait (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* lgdt (mem) / xgetbv+xsetbv (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* lidt (mem) / AMD SVM instructions (reg, all 8 r/m values used). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* smsw - both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5: /* Reserved. */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* lmsw - both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* invlpg (mem) / swapgs+rdtscp (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1297
/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03), Gv,Ew forms.
 *  @param fIsLar true for LAR (access rights), false for LSL (segment limit).
 *  Both take a 16-bit selector source; 32/64-bit destinations share the
 *  64-bit CIMPL worker. Invalid in real and V86 mode. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source - 16-bit selector fetch. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1391
1392
1393
/** Opcode 0x0f 0x02: LAR Gv,Ew - load access rights (common worker, fIsLar=true). */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}
1400
1401
/** Opcode 0x0f 0x03: LSL Gv,Ew - load segment limit (common worker, fIsLar=false). */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1408
1409
/** Opcode 0x0f 0x05: SYSCALL - defers entirely to the CIMPL worker. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1417
1418
/** Opcode 0x0f 0x06: CLTS - clear the task-switched flag in CR0. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1426
1427
/** Opcode 0x0f 0x07: SYSRET - defers entirely to the CIMPL worker. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1435
1436
/** Opcode 0x0f 0x08: INVD - unimplemented stub. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();
1440
1441
/** Opcode 0x0f 0x09: WBINVD - treated as a privileged no-op (raises \#GP if CPL != 0). */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1454
1455
/** Opcode 0x0f 0x0b: UD2 - architecturally defined invalid opcode. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1462
/** Opcode 0x0f 0x0d: AMD prefetch group P - implemented as NOP Ev (memory forms only). */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Register forms are invalid; only memory operands are accepted. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1503
1504
/** Opcode 0x0f 0x0e: FEMMS (AMD 3DNow!) - unimplemented stub. */
FNIEMOP_STUB(iemOp_femms);
1507
1508
/* 3DNow! instructions (0x0f 0x0f, sub-opcode in the trailing immediate byte):
   all unimplemented stubs, dispatched from iemOp_3Dnow below. */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1580
1581
/** Opcode 0x0f 0x0f: 3DNow! dispatcher - the actual operation is encoded in
 *  an immediate-style byte following the operands; raises \#UD when the
 *  guest CPU does not report 3DNow!. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1623
1624
/* 0x0f 0x10 family (mov[ua]p{s,d}/movs{s,d} loads): unimplemented stubs. */

/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
1633
1634
/** Opcode 0x0f 0x11 - vmovups Wps, Vps: store an unaligned 128-bit value
 *  from an XMM register to a register or memory destination. */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Destination is r/m, source is reg (store direction). */
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1676
1677
/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd (unimplemented stub). */
FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);

/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss (unimplemented stub). */
FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
1683
/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd: store the low 64 bits of an
 *  XMM register to a register or memory destination. */
FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
{
    IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1728
1729
/* 0x0f 0x12/0x13 family (movlps/movlpd/movsldup/movddup): mostly stubs. */

/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT

/** Opcode 0x66 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT

/** Opcode 0xf3 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT

/** Opcode 0xf2 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT

/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
1744
/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq: store the low 64 bits of an XMM
 *  register to memory; the register form is invalid (\#UD). */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        /* movlpd only has a memory form; the register encoding is undefined. */
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1791
/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/* 0x0f 0x14-0x17 family (unpck{l,h}p{s,d}, movhps/movhpd, movshdup): stubs. */

/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */
1820
1821
/** Opcode 0x0f 0x18 (group 16): PREFETCHh hints - memory forms only are
 *  valid; currently emulated as NOPs. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* Register forms are invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1854
1855
/** Opcode 0x0f 0x19..0x1f: multi-byte NOP Ev - decodes the operand (so RIP
 *  advances past any displacement) but performs no operation. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1881
1882
/** Opcode 0x0f 0x20: MOV Rd,Cd - read a control register into a GPR. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2-CR4 and CR8 exist; others raise #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
1914
1915
/** Opcode 0x0f 0x21: MOV Rd,Dd - read a debug register into a GPR.
 *  REX.R on the reg field is invalid (there is no DR8..DR15). */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1929
1930
/** Opcode 0x0f 0x22: MOV Cd,Rd - write a GPR into a control register. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2-CR4 and CR8 exist; others raise #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
1962
1963
/** Opcode 0x0f 0x23: MOV Dd,Rd - write a GPR into a debug register.
 *  REX.R on the reg field is invalid (there is no DR8..DR15). */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
1977
1978
/** Opcode 0x0f 0x24: MOV Rd,Td - test registers; invalid on modern CPUs. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1987
1988
/** Opcode 0x0f 0x26: MOV Td,Rd - test registers; invalid on modern CPUs. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1997
1998
/** Opcode 0x0f 0x28 - vmovaps Vps, Wps: aligned 128-bit load into an XMM
 *  register; the memory fetch enforces 16-byte alignment (\#GP otherwise). */
FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2040
/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd
 * Same as the 0x0f 0x28 form but raises the SSE2-related exceptions. */
FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2082
2083/* Opcode 0xf3 0x0f 0x28 - invalid */
2084/* Opcode 0xf2 0x0f 0x28 - invalid */
2085
/** Opcode 0x0f 0x29 - movaps Wps,Vps / (with 0x66) movapd Wpd,Vpd.
 * Store direction: XMM register to register/memory; operand-size prefix
 * selects the SSE2 (pd) variant's exception checks. */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
{
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
        IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
    else
        IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Note: destination is rm, source is reg (store direction). */
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); /* NB: despite the name this is the store (destination) address. */

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2136
2137
/* 0x0f 0x2a conversion group - all still unimplemented stubs. */
/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
2146
2147
/** Opcode 0x0f 0x2b - movntps Mps,Vps / (with 0x66) movntpd Mpd,Vpd.
 * Non-temporal aligned store; only the memory form is valid. */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
{
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
        IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
    else
        IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); /* NB: despite the name this is the store (destination) address. */

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
2184
2185
/* 0x0f 0x2c..0x2f: truncating/rounding conversions and (un)ordered compares - stubs. */
/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */
2217
/** Opcode 0x0f 0x30 - wrmsr: deferred entirely to the C implementation. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
2225
2226
/** Opcode 0x0f 0x31 - rdtsc: deferred entirely to the C implementation. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
2234
2235
/** Opcode 0x0f 0x32 - rdmsr (the comment previously said 0x33; RDMSR is 0f 32,
 *  0f 33 is RDPMC - see the Intel SDM opcode map). */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
2243
2244
/** Opcode 0x0f 0x33 - rdpmc (previously mislabelled 0x34; 0f 34 is SYSENTER). */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2257
2258
/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * Notes (from the expansion below):
 *  - In the memory form the operand is always fetched, even when the
 *    condition is false (matches real CMOVcc memory-access behaviour).
 *  - For 32-bit operand size the IEM_MC_ELSE branch clears the high half of
 *    the 64-bit destination, i.e. the destination is zero-extended whether
 *    or not the move happens.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
2359
2360
2361
/** Opcode 0x0f 0x40 - cmovo: move if OF set. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}
2368
2369
/** Opcode 0x0f 0x41 - cmovno: move if OF clear. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}
2376
2377
/** Opcode 0x0f 0x42 - cmovc/cmovb: move if CF set. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}
2384
2385
/** Opcode 0x0f 0x43 - cmovnc/cmovae: move if CF clear. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}
2392
2393
/** Opcode 0x0f 0x44 - cmove/cmovz: move if ZF set. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}
2400
2401
/** Opcode 0x0f 0x45 - cmovne/cmovnz: move if ZF clear. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}
2408
2409
/** Opcode 0x0f 0x46 - cmovbe: move if CF or ZF set. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
2416
2417
/** Opcode 0x0f 0x47 - cmovnbe/cmova: move if both CF and ZF clear. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
2424
2425
/** Opcode 0x0f 0x48 - cmovs: move if SF set. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}
2432
2433
/** Opcode 0x0f 0x49 - cmovns: move if SF clear. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}
2440
2441
/** Opcode 0x0f 0x4a - cmovp: move if PF set. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}
2448
2449
/** Opcode 0x0f 0x4b - cmovnp: move if PF clear. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}
2456
2457
/** Opcode 0x0f 0x4c - cmovl: move if SF != OF. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}
2464
2465
/** Opcode 0x0f 0x4d - cmovnl/cmovge: move if SF == OF. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}
2472
2473
/** Opcode 0x0f 0x4e - cmovle: move if ZF set or SF != OF. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
2480
2481
/** Opcode 0x0f 0x4f - cmovnle/cmovg: move if ZF clear and SF == OF. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
2488
2489#undef CMOV_X
2490
/* 0x0f 0x50..0x5f: SSE/SSE2 packed/scalar arithmetic, sqrt, logic and
   conversions - all still unimplemented stubs. */
/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
/* Opcode 0xf3 0x0f 0x50 - invalid */
/* Opcode 0xf2 0x0f 0x50 - invalid */

/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
/* Opcode 0x66 0x0f 0x52 - invalid */
/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
/* Opcode 0xf2 0x0f 0x52 - invalid */

/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
/* Opcode 0xf2 0x0f 0x53 - invalid */

/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */

/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */

/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */

/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x57 - invalid */
/* Opcode 0xf2 0x0f 0x57 - invalid */

/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);

/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
/* Opcode 0xf2 0x0f 0x5b - invalid */

/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2619
2620/**
2621 * Common worker for MMX instructions on the forms:
2622 * pxxxx mm1, mm2/mem32
2623 *
2624 * The 2nd operand is the first half of a register, which in the memory case
2625 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
2626 * memory accessed for MMX.
2627 *
2628 * Exceptions type 4.
2629 */
2630FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2631{
2632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2633 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2634 {
2635 /*
2636 * Register, register.
2637 */
2638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2639 IEM_MC_BEGIN(2, 0);
2640 IEM_MC_ARG(uint128_t *, pDst, 0);
2641 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2642 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2643 IEM_MC_PREPARE_SSE_USAGE();
2644 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2645 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2646 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2647 IEM_MC_ADVANCE_RIP();
2648 IEM_MC_END();
2649 }
2650 else
2651 {
2652 /*
2653 * Register, memory.
2654 */
2655 IEM_MC_BEGIN(2, 2);
2656 IEM_MC_ARG(uint128_t *, pDst, 0);
2657 IEM_MC_LOCAL(uint64_t, uSrc);
2658 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2660
2661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2663 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2664 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2665
2666 IEM_MC_PREPARE_SSE_USAGE();
2667 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2668 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2669
2670 IEM_MC_ADVANCE_RIP();
2671 IEM_MC_END();
2672 }
2673 return VINF_SUCCESS;
2674}
2675
2676
2677/**
2678 * Common worker for SSE2 instructions on the forms:
2679 * pxxxx xmm1, xmm2/mem128
2680 *
2681 * The 2nd operand is the first half of a register, which in the memory case
2682 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
2683 * memory accessed for MMX.
2684 *
2685 * Exceptions type 4.
2686 */
2687FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2688{
2689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2690 if (!pImpl->pfnU64)
2691 return IEMOP_RAISE_INVALID_OPCODE();
2692 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2693 {
2694 /*
2695 * Register, register.
2696 */
2697 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2698 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2700 IEM_MC_BEGIN(2, 0);
2701 IEM_MC_ARG(uint64_t *, pDst, 0);
2702 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2703 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2704 IEM_MC_PREPARE_FPU_USAGE();
2705 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2706 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2707 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2708 IEM_MC_ADVANCE_RIP();
2709 IEM_MC_END();
2710 }
2711 else
2712 {
2713 /*
2714 * Register, memory.
2715 */
2716 IEM_MC_BEGIN(2, 2);
2717 IEM_MC_ARG(uint64_t *, pDst, 0);
2718 IEM_MC_LOCAL(uint32_t, uSrc);
2719 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2720 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2721
2722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2724 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2725 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2726
2727 IEM_MC_PREPARE_FPU_USAGE();
2728 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2729 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2730
2731 IEM_MC_ADVANCE_RIP();
2732 IEM_MC_END();
2733 }
2734 return VINF_SUCCESS;
2735}
2736
2737
/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd (MMX form - uses the MMX worker). */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2744
2745/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, W */
2746FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2747{
2748 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2749 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2750}
2751
2752/* Opcode 0xf3 0x0f 0x60 - invalid */
2753
2754
/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd (MMX form). */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2761
/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx (SSE form). */
FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2768
2769/* Opcode 0xf3 0x0f 0x61 - invalid */
2770
2771
/** Opcode 0x0f 0x62 - punpckldq Pq, Qd (MMX form). */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2778
/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx (SSE form). */
FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2785
2786/* Opcode 0xf3 0x0f 0x62 - invalid */
2787
2788
2789
/* 0x0f 0x63..0x67: pack and packed-compare instructions - stubs. */
/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x63 - invalid */

/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x64 - invalid */

/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x65 - invalid */

/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x66 - invalid */

/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0x67 - invalid */
2819
2820
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Guard against table entries without an MMX variant. */
    AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2879
2880
/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_ARG(uint128_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_LOCAL(uint128_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2936
2937
/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd (MMX form). */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2944
/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
    /* Defer to the common SSE high-half interleave worker with the byte impl. */
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2951/* Opcode 0xf3 0x0f 0x68 - invalid */
2952
2953
/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
    /* Defer to the common MMX high-half interleave worker with the word impl. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2960
2961/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2962FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2963{
2964 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2965 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2966
2967}
2968/* Opcode 0xf3 0x0f 0x69 - invalid */
2969
2970
/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
    /* Defer to the common MMX high-half interleave worker with the dword impl. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2977
/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
{
    IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
    /* Defer to the common SSE high-half interleave worker with the dword impl. */
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2984/* Opcode 0xf3 0x0f 0x6a - invalid */
2985
2986
/** Opcode 0x0f 0x6b - packssdw Pq, Qd
 * (stub - placeholder handler generated by FNIEMOP_STUB; implementation pending) */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx
 * (stub - placeholder handler generated by FNIEMOP_STUB; implementation pending) */
FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2991/* Opcode 0xf3 0x0f 0x6b - invalid */
2992
2993
2994/* Opcode 0x0f 0x6c - invalid */
2995
/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
    /* Defer to the common SSE low-half interleave worker with the qword impl. */
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
3002
3003/* Opcode 0xf3 0x0f 0x6c - invalid */
3004/* Opcode 0xf2 0x0f 0x6c - invalid */
3005
3006
3007/* Opcode 0x0f 0x6d - invalid */
3008
3009/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
3010FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3011{
3012 IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq");
3013 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3014}
3015
3016/* Opcode 0xf3 0x0f 0x6d - invalid */
3017
3018
/** Opcode 0x0f 0x6e - movd/q Pd, Ey
 * Moves a gpr/memory dword or qword into an MMX register. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* REX.W selects the 64-bit (movq) form, otherwise the 32-bit movd. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
    else
        IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* MMX, greg */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        else
            /* movd: the dword source is zero-extended into the 64-bit MMX reg. */
            IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* MMX, [mem] */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        }
        else
        {
            /* movd: read only a dword, zero-extend on store. */
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3069
/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey
 * Moves a gpr/memory dword or qword into the low part of an XMM register,
 * zero-extending to the full 128 bits. */
FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* REX.W selects the 64-bit (movq) form, otherwise the 32-bit movd. */
    /* NOTE(review): the stats names/format strings say Wq/Wd while the handler
       name says Vy,Ey - looks like a W/V mixup in the mnemonic text; verify. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(vmovdq_Wq_Eq, "vmovq Wq,Eq");
    else
        IEMOP_MNEMONIC(vmovdq_Wd_Ed, "vmovd Wd,Ed");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* XMM, greg*/
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* XMM, [mem] */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3126
3127/* Opcode 0xf3 0x0f 0x6e - invalid */
3128
3129
/** Opcode 0x0f 0x6f - movq Pq, Qq
 * Moves a qword from an MMX register or memory into an MMX register. */
FNIEMOP_DEF(iemOp_movq_Pq_Qq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        /* MMX registers are not REX-extended - only the 3 ModR/M bits index them. */
        IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3173
/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx
 * Aligned 128-bit load into an XMM register (alignment-checking fetch). */
FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Aligned fetch variant - the unaligned counterpart is handled by movdqu. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3214
/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx
 * Unaligned 128-bit load into an XMM register (plain fetch, no alignment check). */
FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Unlike movdqa, use the non-alignment-checking fetch here. */
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3255
3256
/** Opcode 0x0f 0x70. The immediate here is evil!
 * "Evil" because the imm8 follows the ModR/M + displacement bytes, so in the
 * memory forms it can only be fetched after the effective address has been
 * decoded.  Prefix selects pshufw (none), pshufd (0x66), pshuflw (0xf2) or
 * pshufhw (0xf3). */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* All three SSE forms share the decoding; pick the assembly helper. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC(pshufd_Vdq_Wdq, "pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC(pshuflw_Vdq_Wdq, "pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC(pshufhw_Vdq_Wdq, "pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                /* The immediate must be fetched AFTER the effective address bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                /* pshufw requires SSE or the AMD MMX extensions. */
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                /* The immediate must be fetched AFTER the effective address bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            /* 0x66+0xf2/0xf3 combinations etc. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3385
3386
/* Group 12 (0x0f 0x71) worker stubs - placeholder handlers generated by
   FNIEMOP_STUB_1; implementations pending. */

/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
3404
3405
3406/** Opcode 0x0f 0x71. */
3407FNIEMOP_DEF(iemOp_Grp12)
3408{
3409 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3410 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3411 return IEMOP_RAISE_INVALID_OPCODE();
3412 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3413 {
3414 case 0: case 1: case 3: case 5: case 7:
3415 return IEMOP_RAISE_INVALID_OPCODE();
3416 case 2:
3417 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3418 {
3419 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
3420 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3421 default: return IEMOP_RAISE_INVALID_OPCODE();
3422 }
3423 case 4:
3424 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3425 {
3426 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3427 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3428 default: return IEMOP_RAISE_INVALID_OPCODE();
3429 }
3430 case 6:
3431 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3432 {
3433 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3434 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3435 default: return IEMOP_RAISE_INVALID_OPCODE();
3436 }
3437 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3438 }
3439}
3440
3441
/* Group 13 (0x0f 0x72) worker stubs - placeholder handlers generated by
   FNIEMOP_STUB_1; implementations pending. */

/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3459
3460
3461/** Opcode 0x0f 0x72. */
3462FNIEMOP_DEF(iemOp_Grp13)
3463{
3464 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3465 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3466 return IEMOP_RAISE_INVALID_OPCODE();
3467 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3468 {
3469 case 0: case 1: case 3: case 5: case 7:
3470 return IEMOP_RAISE_INVALID_OPCODE();
3471 case 2:
3472 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3473 {
3474 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3475 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3476 default: return IEMOP_RAISE_INVALID_OPCODE();
3477 }
3478 case 4:
3479 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3480 {
3481 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3482 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3483 default: return IEMOP_RAISE_INVALID_OPCODE();
3484 }
3485 case 6:
3486 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3487 {
3488 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3489 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3490 default: return IEMOP_RAISE_INVALID_OPCODE();
3491 }
3492 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3493 }
3494}
3495
3496
/* Group 14 (0x0f 0x73) worker stubs - placeholder handlers generated by
   FNIEMOP_STUB_1; implementations pending.  Note: /3 (psrldq) and /7 (pslldq)
   exist only in the 0x66 (SSE) encoding. */

/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3514
3515
3516/** Opcode 0x0f 0x73. */
3517FNIEMOP_DEF(iemOp_Grp14)
3518{
3519 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3520 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3521 return IEMOP_RAISE_INVALID_OPCODE();
3522 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3523 {
3524 case 0: case 1: case 4: case 5:
3525 return IEMOP_RAISE_INVALID_OPCODE();
3526 case 2:
3527 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3528 {
3529 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3530 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3531 default: return IEMOP_RAISE_INVALID_OPCODE();
3532 }
3533 case 3:
3534 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3535 {
3536 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3537 default: return IEMOP_RAISE_INVALID_OPCODE();
3538 }
3539 case 6:
3540 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3541 {
3542 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3543 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3544 default: return IEMOP_RAISE_INVALID_OPCODE();
3545 }
3546 case 7:
3547 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3548 {
3549 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3550 default: return IEMOP_RAISE_INVALID_OPCODE();
3551 }
3552 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3553 }
3554}
3555
3556
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxx    mm1,  mm2/mem64
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 and MMX cpuid checks.
 *
 * The 0x66 prefix selects the SSE2 (128-bit) form, no prefix the MMX (64-bit)
 * form; any 0xf2/0xf3 prefix combination decodes to \#UD.
 *
 * @param   pImpl   Pointer to the U64 (MMX) and U128 (SSE) assembly workers.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* Alignment-checking fetch - enforces the 16-byte requirement. */
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3662
3663
/** Opcode 0x0f 0x74 - pcmpeqb Pq,Qq / pcmpeqb Vdq,Wdq (prefix-dependent). */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
    /* The common MMX/SSE2 worker picks the form from the prefixes. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
3670
3671
/** Opcode 0x0f 0x75 - pcmpeqw Pq,Qq / pcmpeqw Vdq,Wdq (prefix-dependent). */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
    /* The common MMX/SSE2 worker picks the form from the prefixes. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
3678
3679
/** Opcode 0x0f 0x76 - pcmpeqd Pq,Qq / pcmpeqd Vdq,Wdq (prefix-dependent).
 * NOTE(review): the identifier misspells "pcmpeqd" as "pcmped"; kept as-is
 * since the opcode dispatch table elsewhere references this name. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
    /* The common MMX/SSE2 worker picks the form from the prefixes. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
3686
3687
/* Opcodes 0x0f 0x77 .. 0x0f 0x7d: stubs (placeholder handlers generated by
   FNIEMOP_STUB; implementations pending) and invalid encodings. */

/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
/* Opcode 0x66 0x0f 0x77 - invalid */
/* Opcode 0xf3 0x0f 0x77 - invalid */
/* Opcode 0xf2 0x0f 0x77 - invalid */

/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17);
/* Opcode 0xf3 0x0f 0x78 - invalid */
/* Opcode 0xf2 0x0f 0x78 - invalid */

/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
/* Opcode 0x66 0x0f 0x79 - invalid */
/* Opcode 0xf3 0x0f 0x79 - invalid */
/* Opcode 0xf2 0x0f 0x79 - invalid */

/* Opcode 0x0f 0x7a - invalid */
/* Opcode 0x66 0x0f 0x7a - invalid */
/* Opcode 0xf3 0x0f 0x7a - invalid */
/* Opcode 0xf2 0x0f 0x7a - invalid */

/* Opcode 0x0f 0x7b - invalid */
/* Opcode 0x66 0x0f 0x7b - invalid */
/* Opcode 0xf3 0x0f 0x7b - invalid */
/* Opcode 0xf2 0x0f 0x7b - invalid */

/* Opcode 0x0f 0x7c - invalid */
/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x7c - invalid */
/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);

/* Opcode 0x0f 0x7d - invalid */
/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x7d - invalid */
/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3730
3731
/** Opcode 0x0f 0x7e.
 * Stores an MMX (no prefix) or XMM (0x66 prefix) register's low dword/qword to
 * a gpr or memory; REX.W selects the qword (movq) form. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
            else
                IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
            else
                IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3848
3849
/**
 * Opcode 0x0f 0x7f - store from SIMD register to register/memory.
 *
 * Decodes to one of three instructions depending on the mandatory prefix:
 *   - 0x66:       movdqa Wdq,Vdq  (SSE2, aligned 128-bit store)
 *   - 0xf3:       movdqu Wdq,Vdq  (SSE2, unaligned 128-bit store)
 *   - no prefix:  movq   Qq,Pq    (MMX, 64-bit store)
 * Any other prefix combination (e.g. 0xf2, or several at once) raises \#UD.
 */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru */     /* deliberate: aligned and unaligned share the code below, differing only in fAligned */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /* Destination is the r/m XMM register, source the reg XMM register. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                                      ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                /* Fetch the XMM register and store 128 bits to memory; the aligned
                   variant raises \#GP on a misaligned effective address. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t,  u128Tmp);
                IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                /* Fetch the MMX register and store 64 bits to memory. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3950
3951
3952
/**
 * Opcode 0x0f 0x80 - jo Jv.
 *
 * Near jump with 16/32-bit displacement, taken when OF is set. 386+.
 */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo  Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* in 64-bit mode Jv defaults to 64-bit operand size (32-bit displacement) */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3987
3988
/**
 * Opcode 0x0f 0x81 - jno Jv.
 *
 * Near jump with 16/32-bit displacement, taken when OF is clear (note the
 * inverted branch bodies: OF set falls through). 386+.
 */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* in 64-bit mode Jv defaults to 64-bit operand size */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4023
4024
/**
 * Opcode 0x0f 0x82 - jc/jb/jnae Jv.
 *
 * Near jump with 16/32-bit displacement, taken when CF is set. 386+.
 */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* in 64-bit mode Jv defaults to 64-bit operand size */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4059
4060
/**
 * Opcode 0x0f 0x83 - jnc/jnb/jae Jv.
 *
 * Near jump with 16/32-bit displacement, taken when CF is clear (branch
 * bodies inverted: CF set falls through). 386+.
 */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* in 64-bit mode Jv defaults to 64-bit operand size */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4095
4096
/**
 * Opcode 0x0f 0x84 - je/jz Jv.
 *
 * Near jump with 16/32-bit displacement, taken when ZF is set. 386+.
 */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* in 64-bit mode Jv defaults to 64-bit operand size */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4131
4132
/**
 * Opcode 0x0f 0x85 - jne/jnz Jv.
 *
 * Near jump with 16/32-bit displacement, taken when ZF is clear (branch
 * bodies inverted: ZF set falls through). 386+.
 */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* in 64-bit mode Jv defaults to 64-bit operand size */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4167
4168
/**
 * Opcode 0x0f 0x86 - jbe/jna Jv.
 *
 * Near jump with 16/32-bit displacement, taken when CF or ZF is set
 * (unsigned below-or-equal). 386+.
 */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* in 64-bit mode Jv defaults to 64-bit operand size */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4203
4204
/**
 * Opcode 0x0f 0x87 - jnbe/ja Jv.
 *
 * Near jump with 16/32-bit displacement, taken when both CF and ZF are
 * clear (unsigned above; branch bodies inverted). 386+.
 */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* in 64-bit mode Jv defaults to 64-bit operand size */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4239
4240
/**
 * Opcode 0x0f 0x88 - js Jv.
 *
 * Near jump with 16/32-bit displacement, taken when SF is set. 386+.
 */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js  Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* in 64-bit mode Jv defaults to 64-bit operand size */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4275
4276
/**
 * Opcode 0x0f 0x89 - jns Jv.
 *
 * Near jump with 16/32-bit displacement, taken when SF is clear (branch
 * bodies inverted: SF set falls through). 386+.
 */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* in 64-bit mode Jv defaults to 64-bit operand size */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4311
4312
/**
 * Opcode 0x0f 0x8a - jp Jv.
 *
 * Near jump with 16/32-bit displacement, taken when PF is set. 386+.
 */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp  Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* in 64-bit mode Jv defaults to 64-bit operand size */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4347
4348
/**
 * Opcode 0x0f 0x8b - jnp Jv.
 *
 * Near jump with 16/32-bit displacement, taken when PF is clear (branch
 * bodies inverted: PF set falls through). 386+.
 */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* in 64-bit mode Jv defaults to 64-bit operand size */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4383
4384
/**
 * Opcode 0x0f 0x8c - jl/jnge Jv.
 *
 * Near jump with 16/32-bit displacement, taken when SF != OF
 * (signed less-than). 386+.
 */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* in 64-bit mode Jv defaults to 64-bit operand size */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4419
4420
/**
 * Opcode 0x0f 0x8d - jnl/jge Jv.
 *
 * Near jump with 16/32-bit displacement, taken when SF == OF
 * (signed greater-or-equal; branch bodies inverted). 386+.
 */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* in 64-bit mode Jv defaults to 64-bit operand size */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4455
4456
/**
 * Opcode 0x0f 0x8e - jle/jng Jv.
 *
 * Near jump with 16/32-bit displacement, taken when ZF is set or SF != OF
 * (signed less-or-equal). 386+.
 */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* in 64-bit mode Jv defaults to 64-bit operand size */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4491
4492
/**
 * Opcode 0x0f 0x8f - jnle/jg Jv.
 *
 * Near jump with 16/32-bit displacement, taken when ZF is clear and
 * SF == OF (signed greater-than; branch bodies inverted). 386+.
 */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* in 64-bit mode Jv defaults to 64-bit operand size */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4527
4528
/**
 * Opcode 0x0f 0x90 - seto Eb.
 *
 * Store 1 into the byte register/memory operand when OF is set, else 0. 386+.
 */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decode completion. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4569
4570
/**
 * Opcode 0x0f 0x91 - setno Eb.
 *
 * Store 1 into the byte register/memory operand when OF is clear, else 0
 * (note the inverted 0/1 constants versus seto). 386+.
 */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decode completion. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4611
4612
/**
 * Opcode 0x0f 0x92 - setc Eb (aka setb/setnae).
 *
 * Store 1 into the byte register/memory operand when CF is set, else 0. 386+.
 */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decode completion. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4653
4654
/**
 * Opcode 0x0f 0x93 - setnc Eb (aka setnb/setae).
 *
 * Store 1 into the byte register/memory operand when CF is clear, else 0
 * (inverted 0/1 constants versus setc). 386+.
 */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decode completion. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4695
4696
/**
 * Opcode 0x0f 0x94 - sete Eb (aka setz).
 *
 * Store 1 into the byte register/memory operand when ZF is set, else 0. 386+.
 */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decode completion. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4737
4738
/**
 * Opcode 0x0f 0x95 - setne Eb (aka setnz).
 *
 * Store 1 into the byte register/memory operand when ZF is clear, else 0
 * (inverted 0/1 constants versus sete). 386+.
 */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decode completion. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4779
4780
/**
 * Opcode 0x0f 0x96 - setbe Eb (aka setna).
 *
 * Store 1 into the byte register/memory operand when CF or ZF is set
 * (unsigned below-or-equal), else 0. 386+.
 */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decode completion. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4821
4822
/**
 * Opcode 0x0f 0x97 - setnbe Eb (aka seta).
 *
 * Store 1 into the byte register/memory operand when both CF and ZF are
 * clear (unsigned above), else 0 (inverted constants versus setbe). 386+.
 */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decode completion. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4863
4864
/**
 * Opcode 0x0f 0x98 - sets Eb.
 *
 * Store 1 into the byte register/memory operand when SF is set, else 0. 386+.
 */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decode completion. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4905
4906
/**
 * Opcode 0x0f 0x99 - setns Eb.
 *
 * Store 1 into the byte register/memory operand when SF is clear, else 0
 * (inverted 0/1 constants versus sets). 386+.
 */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decode completion. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4947
4948
/**
 * Opcode 0x0f 0x9a - setp Eb (aka setpe).
 *
 * Store 1 into the byte register/memory operand when PF is set, else 0. 386+.
 */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is calculated before decode completion. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4989
4990
/**
 * Opcode 0x0f 0x9b - setnp Eb.
 *
 * Stores 1 into the byte register/memory operand if the parity flag (PF) is
 * clear, otherwise stores 0 (inverse of setp).  386+ instruction.
 */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5031
5032
/**
 * Opcode 0x0f 0x9c - setl Eb.
 *
 * Stores 1 into the byte register/memory operand if SF != OF (signed
 * less-than), otherwise stores 0.  386+ instruction.
 */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5073
5074
/**
 * Opcode 0x0f 0x9d - setnl Eb.
 *
 * Stores 1 into the byte register/memory operand if SF == OF (signed
 * greater-or-equal), otherwise stores 0 (inverse of setl).  386+ instruction.
 */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5115
5116
/**
 * Opcode 0x0f 0x9e - setle Eb.
 *
 * Stores 1 into the byte register/memory operand if ZF is set or SF != OF
 * (signed less-or-equal), otherwise stores 0.  386+ instruction.
 */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5157
5158
/**
 * Opcode 0x0f 0x9f - setnle Eb.
 *
 * Stores 1 into the byte register/memory operand if ZF is clear and SF == OF
 * (signed greater-than), otherwise stores 0 (inverse of setle).  386+.
 */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5199
5200
5201/**
5202 * Common 'push segment-register' helper.
5203 */
5204FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5205{
5206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5207 if (iReg < X86_SREG_FS)
5208 IEMOP_HLP_NO_64BIT();
5209 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5210
5211 switch (pVCpu->iem.s.enmEffOpSize)
5212 {
5213 case IEMMODE_16BIT:
5214 IEM_MC_BEGIN(0, 1);
5215 IEM_MC_LOCAL(uint16_t, u16Value);
5216 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5217 IEM_MC_PUSH_U16(u16Value);
5218 IEM_MC_ADVANCE_RIP();
5219 IEM_MC_END();
5220 break;
5221
5222 case IEMMODE_32BIT:
5223 IEM_MC_BEGIN(0, 1);
5224 IEM_MC_LOCAL(uint32_t, u32Value);
5225 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5226 IEM_MC_PUSH_U32_SREG(u32Value);
5227 IEM_MC_ADVANCE_RIP();
5228 IEM_MC_END();
5229 break;
5230
5231 case IEMMODE_64BIT:
5232 IEM_MC_BEGIN(0, 1);
5233 IEM_MC_LOCAL(uint64_t, u64Value);
5234 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5235 IEM_MC_PUSH_U64(u64Value);
5236 IEM_MC_ADVANCE_RIP();
5237 IEM_MC_END();
5238 break;
5239 }
5240
5241 return VINF_SUCCESS;
5242}
5243
5244
5245/** Opcode 0x0f 0xa0. */
5246FNIEMOP_DEF(iemOp_push_fs)
5247{
5248 IEMOP_MNEMONIC(push_fs, "push fs");
5249 IEMOP_HLP_MIN_386();
5250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5251 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5252}
5253
5254
/**
 * Opcode 0x0f 0xa1 - pop fs.
 *
 * Deferred to the C implementation since segment-register loads involve
 * descriptor-table access and fault checks.  386+ instruction.
 */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}
5263
5264
/**
 * Opcode 0x0f 0xa2 - cpuid.
 *
 * Deferred to the C implementation (leaf lookup in the guest CPUID tables).
 */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
5273
5274
5275/**
5276 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5277 * iemOp_bts_Ev_Gv.
5278 */
5279FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5280{
5281 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5282 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5283
5284 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5285 {
5286 /* register destination. */
5287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5288 switch (pVCpu->iem.s.enmEffOpSize)
5289 {
5290 case IEMMODE_16BIT:
5291 IEM_MC_BEGIN(3, 0);
5292 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5293 IEM_MC_ARG(uint16_t, u16Src, 1);
5294 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5295
5296 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5297 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5298 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5299 IEM_MC_REF_EFLAGS(pEFlags);
5300 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5301
5302 IEM_MC_ADVANCE_RIP();
5303 IEM_MC_END();
5304 return VINF_SUCCESS;
5305
5306 case IEMMODE_32BIT:
5307 IEM_MC_BEGIN(3, 0);
5308 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5309 IEM_MC_ARG(uint32_t, u32Src, 1);
5310 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5311
5312 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5313 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5314 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5315 IEM_MC_REF_EFLAGS(pEFlags);
5316 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5317
5318 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5319 IEM_MC_ADVANCE_RIP();
5320 IEM_MC_END();
5321 return VINF_SUCCESS;
5322
5323 case IEMMODE_64BIT:
5324 IEM_MC_BEGIN(3, 0);
5325 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5326 IEM_MC_ARG(uint64_t, u64Src, 1);
5327 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5328
5329 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5330 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5331 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5332 IEM_MC_REF_EFLAGS(pEFlags);
5333 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5334
5335 IEM_MC_ADVANCE_RIP();
5336 IEM_MC_END();
5337 return VINF_SUCCESS;
5338
5339 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5340 }
5341 }
5342 else
5343 {
5344 /* memory destination. */
5345
5346 uint32_t fAccess;
5347 if (pImpl->pfnLockedU16)
5348 fAccess = IEM_ACCESS_DATA_RW;
5349 else /* BT */
5350 fAccess = IEM_ACCESS_DATA_R;
5351
5352 /** @todo test negative bit offsets! */
5353 switch (pVCpu->iem.s.enmEffOpSize)
5354 {
5355 case IEMMODE_16BIT:
5356 IEM_MC_BEGIN(3, 2);
5357 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5358 IEM_MC_ARG(uint16_t, u16Src, 1);
5359 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5360 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5361 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5362
5363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5364 if (pImpl->pfnLockedU16)
5365 IEMOP_HLP_DONE_DECODING();
5366 else
5367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5368 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5369 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5370 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5371 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5372 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5373 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5374 IEM_MC_FETCH_EFLAGS(EFlags);
5375
5376 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5377 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5378 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5379 else
5380 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5381 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5382
5383 IEM_MC_COMMIT_EFLAGS(EFlags);
5384 IEM_MC_ADVANCE_RIP();
5385 IEM_MC_END();
5386 return VINF_SUCCESS;
5387
5388 case IEMMODE_32BIT:
5389 IEM_MC_BEGIN(3, 2);
5390 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5391 IEM_MC_ARG(uint32_t, u32Src, 1);
5392 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5394 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5395
5396 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5397 if (pImpl->pfnLockedU16)
5398 IEMOP_HLP_DONE_DECODING();
5399 else
5400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5401 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5402 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5403 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5404 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5405 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5406 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5407 IEM_MC_FETCH_EFLAGS(EFlags);
5408
5409 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5410 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5411 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5412 else
5413 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5414 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5415
5416 IEM_MC_COMMIT_EFLAGS(EFlags);
5417 IEM_MC_ADVANCE_RIP();
5418 IEM_MC_END();
5419 return VINF_SUCCESS;
5420
5421 case IEMMODE_64BIT:
5422 IEM_MC_BEGIN(3, 2);
5423 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5424 IEM_MC_ARG(uint64_t, u64Src, 1);
5425 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5427 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5428
5429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5430 if (pImpl->pfnLockedU16)
5431 IEMOP_HLP_DONE_DECODING();
5432 else
5433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5434 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5435 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5436 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5437 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5438 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5439 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5440 IEM_MC_FETCH_EFLAGS(EFlags);
5441
5442 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5443 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5444 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5445 else
5446 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5447 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5448
5449 IEM_MC_COMMIT_EFLAGS(EFlags);
5450 IEM_MC_ADVANCE_RIP();
5451 IEM_MC_END();
5452 return VINF_SUCCESS;
5453
5454 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5455 }
5456 }
5457}
5458
5459
/**
 * Opcode 0x0f 0xa3 - bt Ev,Gv.
 *
 * Bit test (read-only; CF = selected bit).  386+ instruction.
 */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt  Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
}
5467
5468
5469/**
5470 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5471 */
5472FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5473{
5474 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5475 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5476
5477 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5478 {
5479 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5481
5482 switch (pVCpu->iem.s.enmEffOpSize)
5483 {
5484 case IEMMODE_16BIT:
5485 IEM_MC_BEGIN(4, 0);
5486 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5487 IEM_MC_ARG(uint16_t, u16Src, 1);
5488 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5489 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5490
5491 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5492 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5493 IEM_MC_REF_EFLAGS(pEFlags);
5494 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5495
5496 IEM_MC_ADVANCE_RIP();
5497 IEM_MC_END();
5498 return VINF_SUCCESS;
5499
5500 case IEMMODE_32BIT:
5501 IEM_MC_BEGIN(4, 0);
5502 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5503 IEM_MC_ARG(uint32_t, u32Src, 1);
5504 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5505 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5506
5507 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5508 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5509 IEM_MC_REF_EFLAGS(pEFlags);
5510 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5511
5512 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5513 IEM_MC_ADVANCE_RIP();
5514 IEM_MC_END();
5515 return VINF_SUCCESS;
5516
5517 case IEMMODE_64BIT:
5518 IEM_MC_BEGIN(4, 0);
5519 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5520 IEM_MC_ARG(uint64_t, u64Src, 1);
5521 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5522 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5523
5524 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5525 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5526 IEM_MC_REF_EFLAGS(pEFlags);
5527 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5528
5529 IEM_MC_ADVANCE_RIP();
5530 IEM_MC_END();
5531 return VINF_SUCCESS;
5532
5533 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5534 }
5535 }
5536 else
5537 {
5538 switch (pVCpu->iem.s.enmEffOpSize)
5539 {
5540 case IEMMODE_16BIT:
5541 IEM_MC_BEGIN(4, 2);
5542 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5543 IEM_MC_ARG(uint16_t, u16Src, 1);
5544 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5545 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5546 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5547
5548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5549 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5550 IEM_MC_ASSIGN(cShiftArg, cShift);
5551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5552 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5553 IEM_MC_FETCH_EFLAGS(EFlags);
5554 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5555 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5556
5557 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5558 IEM_MC_COMMIT_EFLAGS(EFlags);
5559 IEM_MC_ADVANCE_RIP();
5560 IEM_MC_END();
5561 return VINF_SUCCESS;
5562
5563 case IEMMODE_32BIT:
5564 IEM_MC_BEGIN(4, 2);
5565 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5566 IEM_MC_ARG(uint32_t, u32Src, 1);
5567 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5568 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5570
5571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5572 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5573 IEM_MC_ASSIGN(cShiftArg, cShift);
5574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5575 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5576 IEM_MC_FETCH_EFLAGS(EFlags);
5577 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5578 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5579
5580 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5581 IEM_MC_COMMIT_EFLAGS(EFlags);
5582 IEM_MC_ADVANCE_RIP();
5583 IEM_MC_END();
5584 return VINF_SUCCESS;
5585
5586 case IEMMODE_64BIT:
5587 IEM_MC_BEGIN(4, 2);
5588 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5589 IEM_MC_ARG(uint64_t, u64Src, 1);
5590 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5591 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5593
5594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5595 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5596 IEM_MC_ASSIGN(cShiftArg, cShift);
5597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5598 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5599 IEM_MC_FETCH_EFLAGS(EFlags);
5600 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5601 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5602
5603 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5604 IEM_MC_COMMIT_EFLAGS(EFlags);
5605 IEM_MC_ADVANCE_RIP();
5606 IEM_MC_END();
5607 return VINF_SUCCESS;
5608
5609 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5610 }
5611 }
5612}
5613
5614
5615/**
5616 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5617 */
5618FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5619{
5620 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5621 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5622
5623 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5624 {
5625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5626
5627 switch (pVCpu->iem.s.enmEffOpSize)
5628 {
5629 case IEMMODE_16BIT:
5630 IEM_MC_BEGIN(4, 0);
5631 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5632 IEM_MC_ARG(uint16_t, u16Src, 1);
5633 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5634 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5635
5636 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5637 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5638 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5639 IEM_MC_REF_EFLAGS(pEFlags);
5640 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5641
5642 IEM_MC_ADVANCE_RIP();
5643 IEM_MC_END();
5644 return VINF_SUCCESS;
5645
5646 case IEMMODE_32BIT:
5647 IEM_MC_BEGIN(4, 0);
5648 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5649 IEM_MC_ARG(uint32_t, u32Src, 1);
5650 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5651 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5652
5653 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5654 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5655 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5656 IEM_MC_REF_EFLAGS(pEFlags);
5657 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5658
5659 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5660 IEM_MC_ADVANCE_RIP();
5661 IEM_MC_END();
5662 return VINF_SUCCESS;
5663
5664 case IEMMODE_64BIT:
5665 IEM_MC_BEGIN(4, 0);
5666 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5667 IEM_MC_ARG(uint64_t, u64Src, 1);
5668 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5669 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5670
5671 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5672 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5673 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5674 IEM_MC_REF_EFLAGS(pEFlags);
5675 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5676
5677 IEM_MC_ADVANCE_RIP();
5678 IEM_MC_END();
5679 return VINF_SUCCESS;
5680
5681 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5682 }
5683 }
5684 else
5685 {
5686 switch (pVCpu->iem.s.enmEffOpSize)
5687 {
5688 case IEMMODE_16BIT:
5689 IEM_MC_BEGIN(4, 2);
5690 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5691 IEM_MC_ARG(uint16_t, u16Src, 1);
5692 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5693 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5695
5696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5698 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5699 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5700 IEM_MC_FETCH_EFLAGS(EFlags);
5701 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5702 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5703
5704 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5705 IEM_MC_COMMIT_EFLAGS(EFlags);
5706 IEM_MC_ADVANCE_RIP();
5707 IEM_MC_END();
5708 return VINF_SUCCESS;
5709
5710 case IEMMODE_32BIT:
5711 IEM_MC_BEGIN(4, 2);
5712 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5713 IEM_MC_ARG(uint32_t, u32Src, 1);
5714 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5715 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5716 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5717
5718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5720 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5721 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5722 IEM_MC_FETCH_EFLAGS(EFlags);
5723 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5724 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5725
5726 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5727 IEM_MC_COMMIT_EFLAGS(EFlags);
5728 IEM_MC_ADVANCE_RIP();
5729 IEM_MC_END();
5730 return VINF_SUCCESS;
5731
5732 case IEMMODE_64BIT:
5733 IEM_MC_BEGIN(4, 2);
5734 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5735 IEM_MC_ARG(uint64_t, u64Src, 1);
5736 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5737 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5738 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5739
5740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5742 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5743 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5744 IEM_MC_FETCH_EFLAGS(EFlags);
5745 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5746 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5747
5748 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5749 IEM_MC_COMMIT_EFLAGS(EFlags);
5750 IEM_MC_ADVANCE_RIP();
5751 IEM_MC_END();
5752 return VINF_SUCCESS;
5753
5754 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5755 }
5756 }
5757}
5758
5759
5760
/**
 * Opcode 0x0f 0xa4 - shld Ev,Gv,Ib.
 *
 * Double-precision shift left, immediate count.  386+ instruction.
 */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
5768
5769
/**
 * Opcode 0x0f 0xa5 - shld Ev,Gv,CL.
 *
 * Double-precision shift left, count in CL.  386+ instruction.
 */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
5777
5778
5779/** Opcode 0x0f 0xa8. */
5780FNIEMOP_DEF(iemOp_push_gs)
5781{
5782 IEMOP_MNEMONIC(push_gs, "push gs");
5783 IEMOP_HLP_MIN_386();
5784 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5785 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5786}
5787
5788
/**
 * Opcode 0x0f 0xa9 - pop gs.
 *
 * Deferred to the C implementation, same as pop fs.  386+ instruction.
 */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}
5797
5798
/** Opcode 0x0f 0xaa - rsm.  Not implemented yet (stub macro presumably
 *  reports/asserts not-implemented -- see FNIEMOP_STUB's definition). */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();
5802
5803
/**
 * Opcode 0x0f 0xab - bts Ev,Gv.
 *
 * Bit test and set (lockable; CF = previous bit value).  386+ instruction.
 */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
5811
5812
/**
 * Opcode 0x0f 0xac - shrd Ev,Gv,Ib.
 *
 * Double-precision shift right, immediate count.  386+ instruction.
 */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
5820
5821
/**
 * Opcode 0x0f 0xad - shrd Ev,Gv,CL.
 *
 * Double-precision shift right, count in CL.  386+ instruction.
 */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
5829
5830
/**
 * Opcode 0x0f 0xae mem/0 - fxsave m512.
 *
 * Raises \#UD if the guest CPU lacks FXSAVE/FXRSTOR support; otherwise defers
 * to the C implementation with the effective segment, address and operand
 * size (the latter selects the 64-bit image layout with REX.W).
 */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5849
5850
/**
 * Opcode 0x0f 0xae mem/1 - fxrstor m512.
 *
 * Mirror image of iemOp_Grp15_fxsave: \#UD without FXSAVE/FXRSTOR support,
 * otherwise deferred to the C implementation.
 */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5869
5870
/* Remaining group 15 memory forms -- not implemented yet.  FNIEMOP_STUB_1
   declares a not-implemented handler; FNIEMOP_UD_STUB_1 one that raises
   \#UD. */

/** Opcode 0x0f 0xae mem/2 - ldmxcsr. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3 - stmxcsr. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4 - xsave. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5 - xrstor. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6 - xsaveopt. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7 - clflush. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5888
5889
/**
 * Opcode 0x0f 0xae 11b/5 - lfence.
 *
 * Raises \#UD when the guest lacks SSE2.  Executes the real LFENCE when the
 * host has SSE2, otherwise falls back to an alternative memory fence.
 */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5908
5909
/**
 * Opcode 0x0f 0xae 11b/6 - mfence.
 *
 * Raises \#UD when the guest lacks SSE2.  Executes the real MFENCE when the
 * host has SSE2, otherwise falls back to an alternative memory fence.
 */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5928
5929
/**
 * Opcode 0x0f 0xae 11b/7 - sfence (store fence).
 *
 * NOTE(review): sfence is architecturally an SSE (not SSE2) instruction on
 * real hardware, but this implementation gates it on the guest's SSE2 flag
 * like lfence/mfence — confirm intentional.  When the host lacks SSE2 an
 * alternative memory fence helper is used.
 */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm); /* mod/reg already fully select this encoding; rm bits are don't-care */
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else /* host cannot execute sfence; fall back to a generic fence helper */
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5948
5949
/** Opcode 0xf3 0x0f 0xae 11b/0 - rdfsbase (FSGSBASE). UD stub: \#UD until implemented. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1 - rdgsbase (FSGSBASE). UD stub: \#UD until implemented. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2 - wrfsbase (FSGSBASE). UD stub: \#UD until implemented. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3 - wrgsbase (FSGSBASE). UD stub: \#UD until implemented. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5961
5962
/**
 * Opcode 0x0f 0xae - group 15 dispatcher.
 *
 * Memory forms (mod != 3) are selected purely by the reg field:
 * fxsave/fxrstor/ldmxcsr/stmxcsr/xsave/xrstor/xsaveopt/clflush.
 *
 * Register forms (mod == 3) additionally depend on the repeat/size/lock
 * prefixes: no prefix gives the fence instructions (reg 5..7), F3 gives the
 * FSGSBASE instructions (reg 0..3), anything else is \#UD.
 */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory forms: dispatch on the reg field only. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register forms: meaning further depends on the legacy prefixes. */
        switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* no relevant prefix: fences in reg 5..7, rest is #UD */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* unreachable: all cases above return */

            case IEM_OP_PRF_REPZ: /* F3 prefix: FSGSBASE in reg 0..3, rest is #UD */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* unreachable: all cases above return */

            default: /* any other prefix combination is undefined */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
6022
6023
/**
 * Opcode 0x0f 0xaf - imul Gv,Ev (two operand form).
 *
 * Delegates to the common reg,reg/mem binary-operator helper with the
 * two-operand imul implementation table.  SF/ZF/AF/PF are architecturally
 * undefined after imul, hence the verification-mode exclusion.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
6032
6033
/**
 * Opcode 0x0f 0xb0 - cmpxchg Eb,Gb.
 *
 * Compares AL with the destination; on equality stores the source byte in
 * the destination, otherwise loads the destination into AL.  The assembly
 * worker receives AL by reference and updates it as needed.  A locked
 * variant is selected when the LOCK prefix is present (memory form only;
 * the register form also accepts LOCK here via IEMOP_HLP_DONE_DECODING —
 * NOTE(review): confirm that matches hardware, which raises \#UD for
 * LOCK on a register destination).
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: all operands live in guest registers. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG(uint32_t *,      pEFlags,                3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory destination: map the destination byte R/W, run the worker on
         * a local AL copy, then commit memory, EFLAGS and AL in that order.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); /* write back the (possibly updated) AL */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6092
/**
 * Opcode 0x0f 0xb1 - cmpxchg Ev,Gv.
 *
 * Word/dword/qword variant of cmpxchg: compares e/rAX with the destination,
 * on equality stores the source, otherwise loads the destination into e/rAX.
 * The accumulator is passed to the assembly worker by reference.  On 32-bit
 * x86 hosts (RT_ARCH_X86) the 64-bit source operand is also passed by
 * reference, since it cannot travel in a single register there.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: dispatch on the effective operand size. */
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t *,      pu16Ax,                 1);
                IEM_MC_ARG(uint16_t,        u16Src,                 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t *,      pu32Eax,                1);
                IEM_MC_ARG(uint32_t,        u32Src,                 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit writes to 64-bit registers clear the high half. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t *,      pu64Rax,                1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *,      pu64Src,                2); /* by-ref on 32-bit hosts */
#else
                IEM_MC_ARG(uint64_t,        u64Src,                 2);
#endif
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * Memory destination: map destination R/W, run the worker on a local
         * accumulator copy, then commit memory, EFLAGS and e/rAX in order.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t *,      pu16Ax,                 1);
                IEM_MC_ARG(uint16_t,        u16Src,                 2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); /* write back possibly updated AX */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t *,      pu32Eax,                1);
                IEM_MC_ARG(uint32_t,        u32Src,                 2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); /* store clears high half in 64-bit mode */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t *,      pu64Rax,                1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *,      pu64Src,                2); /* by-ref on 32-bit hosts */
#else
                IEM_MC_ARG(uint64_t,        u64Src,                 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); /* write back possibly updated RAX */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6286
6287
/**
 * Common worker for lss/lfs/lgs Gv,Mp: loads a far pointer from memory into a
 * segment register and a general register.
 *
 * The far pointer layout in memory is offset first, then the 16-bit selector
 * immediately after it (at displacement 2/4/8 for 16/32/64-bit operands).
 * The actual segment/register loading is done by iemCImpl_load_SReg_Greg.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte; must denote a memory operand.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint16_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2); /* selector follows the 16-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint32_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4); /* selector follows the 32-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint64_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8); /* selector follows the 64-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6349
6350
6351/** Opcode 0x0f 0xb2. */
6352FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6353{
6354 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6355 IEMOP_HLP_MIN_386();
6356 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6357 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6358 return IEMOP_RAISE_INVALID_OPCODE();
6359 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6360}
6361
6362
/**
 * Opcode 0x0f 0xb3 - btr Ev,Gv (bit test and reset).
 *
 * Delegates to the common bit-operation worker with the btr implementation
 * table.
 */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
6370
6371
6372/** Opcode 0x0f 0xb4. */
6373FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6374{
6375 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6376 IEMOP_HLP_MIN_386();
6377 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6378 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6379 return IEMOP_RAISE_INVALID_OPCODE();
6380 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6381}
6382
6383
6384/** Opcode 0x0f 0xb5. */
6385FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6386{
6387 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6388 IEMOP_HLP_MIN_386();
6389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6390 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6391 return IEMOP_RAISE_INVALID_OPCODE();
6392 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6393}
6394
6395
/**
 * Opcode 0x0f 0xb6 - movzx Gv,Eb.
 *
 * Zero-extends a byte from a register or memory into a 16/32/64-bit general
 * register, dispatching on the effective operand size.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6489
6490
/**
 * Opcode 0x0f 0xb7 - movzx Gv,Ew.
 *
 * Zero-extends a word into a 32- or 64-bit general register.  A 16-bit
 * effective operand size is treated the same as 32-bit (16->16 movzx is a
 * plain move; see the todo below about the operand size prefix).
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            /* 16/32-bit operand size: zero-extend word to dword. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* 64-bit operand size: zero-extend word to qword. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6559
6560
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF). UD stub: \#UD. */
FNIEMOP_UD_STUB(iemOp_jmpe);
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev. Stub: not implemented yet. */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6565
6566
/**
 * Opcode 0x0f 0xb9 - group 10 (UD1).
 *
 * Architecturally reserved; always raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
6573
6574
/**
 * Opcode 0x0f 0xba - group 8: bt/bts/btr/btc Ev,Ib.
 *
 * The reg field selects the operation (0..3 are invalid).  The bit offset
 * immediate is masked to the operand width (0x0f/0x1f/0x3f).  BT has no
 * locked form (pfnLockedU16 is NULL), so it maps the memory operand
 * read-only and rejects the LOCK prefix.
 */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC(bt_Ev_Ib,  "bt  Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u8Bit & 0x0f, 1); /* bit offset mod 16 */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u8Bit & 0x1f, 1); /* bit offset mod 32 */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit writes to 64-bit registers clear the high half. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u8Bit & 0x3f, 1); /* bit offset mod 64 */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads the destination; the others read-modify-write it. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The trailing '1' reserves one opcode byte for the immediate below. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else /* BT: no locked form, LOCK prefix is invalid */
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The trailing '1' reserves one opcode byte for the immediate below. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else /* BT: no locked form, LOCK prefix is invalid */
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The trailing '1' reserves one opcode byte for the immediate below. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else /* BT: no locked form, LOCK prefix is invalid */
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
6746
6747
/**
 * Opcode 0x0f 0xbb - btc Ev,Gv (bit test and complement).
 *
 * Delegates to the common bit-operation worker with the btc implementation
 * table.
 */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
6755
6756
/**
 * Opcode 0x0f 0xbc - bsf Gv,Ev (bit scan forward).
 *
 * Delegates to the common reg,reg/mem binary-operator helper.  All flags
 * except ZF are architecturally undefined after bsf.
 */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
6765
6766
/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev. Stub: not implemented yet. */
FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6769
6770
/**
 * Opcode 0x0f 0xbd - bsr Gv,Ev (bit scan reverse).
 *
 * Delegates to the common reg,reg/mem binary-operator helper.  All flags
 * except ZF are architecturally undefined after bsr.
 */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
6779
6780
/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev. Stub: not implemented yet. */
FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6783
6784
/** Opcode 0x0f 0xbe.
 *
 * MOVSX Gv,Eb - sign-extend a byte register or byte memory operand into a
 * 16/32/64-bit destination register, according to the effective operand size.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6878
6879
/** Opcode 0x0f 0xbf.
 *
 * MOVSX Gv,Ew - sign-extend a word register or word memory operand into a
 * 32/64-bit destination register.  A 16-bit effective operand size is folded
 * into the 32-bit path.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6948
6949
/** Opcode 0x0f 0xc0.
 *
 * XADD Eb,Gb - exchange the byte operands and add them, storing the sum in
 * the destination.  The LOCK prefix is honoured for the memory form.
 */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();        /* XADD appeared with the 80486. */
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  The source register is copied to a local
         * so the helper can swap it with the mapped memory operand; the copy
         * is written back to the register afterwards.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
7008
7009
/** Opcode 0x0f 0xc1.
 *
 * XADD Ev,Gv - exchange the operands and add them, storing the sum in the
 * destination.  Handles 16/32/64-bit operand sizes; the LOCK prefix is
 * honoured for the memory forms.
 */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit writes in 64-bit mode zero the upper halves of both
                   registers involved. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  The source register is copied to a local
         * so the helper can exchange it with the mapped memory operand; the
         * copy is stored back to the register afterwards.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7162
7163
/* 0x0f 0xc2: the CMPPS/CMPPD/CMPSS/CMPSD family - not implemented yet. */
/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7172
7173
/** Opcode 0x0f 0xc3.
 *
 * MOVNTI My,Gy - non-temporal store of a 32/64-bit register to memory.
 * Requires SSE2; only the register-to-memory form is valid.  The
 * non-temporal hint itself is not modelled - the store is performed as a
 * regular write.
 */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* The feature check comes after decoding is complete so the
                   instruction length is accounted for when raising #UD. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form.   */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0x66 0x0f 0xc3 - invalid */
/* Opcode 0xf3 0x0f 0xc3 - invalid */
/* Opcode 0xf2 0x0f 0xc3 - invalid */

/* 0x0f 0xc4..0xc6: PINSRW/PEXTRW/SHUFPS/SHUFPD - not implemented yet. */
/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
/* Opcode 0xf3 0x0f 0xc4 - invalid */
/* Opcode 0xf2 0x0f 0xc4 - invalid */

/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
/* Opcode 0xf3 0x0f 0xc5 - invalid */
/* Opcode 0xf2 0x0f 0xc5 - invalid */

/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
/* Opcode 0xf3 0x0f 0xc6 - invalid */
/* Opcode 0xf2 0x0f 0xc6 - invalid */
7251
7252
/** Opcode 0x0f 0xc7 !11/1.
 *
 * CMPXCHG8B Mq - compare EDX:EAX with the 64-bit memory operand; on match
 * store ECX:EBX to memory, otherwise load the memory value into EDX:EAX.
 * ZF reports the outcome.  The LOCK prefix is honoured.
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Assemble the EDX:EAX compare value from the 32-bit register halves. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* ... and the ECX:EBX replacement value. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch (ZF clear) the helper has updated the EAX:EDX copy with
       the memory value; commit it to the registers. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7297
7298
/** Opcode REX.W 0x0f 0xc7 !11/1.
 *
 * CMPXCHG16B Mdq - compare RDX:RAX with the 16-byte memory operand; on match
 * store RCX:RBX to memory, otherwise load the memory value into RDX:RAX.
 * Requires the CPUID CX16 feature and a 16-byte aligned operand (else \#GP).
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
#if 0
        RT_NOREF(bRm);
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#else
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        /* Unlike most instructions, cmpxchg16b raises #GP(0) on a misaligned operand. */
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        /* Assemble the RDX:RAX compare value... */
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        /* ... and the RCX:RBX replacement value. */
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);
# ifdef RT_ARCH_AMD64
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
        else
# endif
        {
            /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
                     accesses and not all atomic, which works fine in a UNI CPU guest
                     configuration (ignoring DMA).  If guest SMP is active we have no choice
                     but to use a rendezvous callback here.  Sigh. */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
            {
                IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
            }
        }

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* On mismatch (ZF clear) commit the memory value the helper wrote
           into the RAX:RDX copy back to the registers. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();

        IEM_MC_END();
        return VINF_SUCCESS;
#endif
    }
    Log(("cmpxchg16b -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
7372
7373
/* Group 9 /6 and /7 encodings - all stubbed as #UD for now. */
/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7388
7389
/** Opcode 0x0f 0xc7.
 *
 * Group 9 dispatcher: routes on the ModRM reg field (and prefixes) to
 * CMPXCHG8B/16B (/1), RDRAND (11/6), the VMX instructions (/6, /7), and
 * raises \#UD for everything else.
 */
FNIEMOP_DEF(iemOp_Grp9)
{
    /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 2: case 3: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 1:
            /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
            /* Register operand or 0x66/0xf3 prefixes -> #UD. */
            if (   (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
                || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
                return IEMOP_RAISE_INVALID_OPCODE();
            /* REX.W selects the 16-byte variant. */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
            return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
        case 6:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
            /* Memory form: the prefix selects between the VMX instructions. */
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
            {
                case 0:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
                case IEM_OP_PRF_SIZE_OP:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
                case IEM_OP_PRF_REPZ:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
                default:
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
            {
                case 0:
                case IEM_OP_PRF_REPZ:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
                default:
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7433
7434
/**
 * Common 'bswap register' helper for opcodes 0x0f 0xc8 thru 0xcf.
 *
 * Byte-swaps the general purpose register @a iReg according to the effective
 * operand size.
 *
 * @param   iReg    The register index, with any REX.B extension applied.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* NOTE(review): 16-bit bswap looks like it's modelled as a swap of
               the low word via a 32-bit register reference, leaving the rest
               of the register alone - confirm against hardware behaviour. */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);     /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            /* 32-bit writes in 64-bit mode clear the upper dword. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7474
7475
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals state that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
7486
7487
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();        /* BSWAP appeared with the 80486. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
7495
7496
7497/** Opcode 0x0f 0xca. */
7498FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7499{
7500 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r9");
7501 IEMOP_HLP_MIN_486();
7502 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7503}
7504
7505
7506/** Opcode 0x0f 0xcb. */
7507FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7508{
7509 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r9");
7510 IEMOP_HLP_MIN_486();
7511 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7512}
7513
7514
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();        /* BSWAP appeared with the 80486. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
7522
7523
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();        /* BSWAP appeared with the 80486. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
7531
7532
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();        /* BSWAP appeared with the 80486. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
7540
7541
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();        /* BSWAP appeared with the 80486. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
7549
7550
/* 0x0f 0xd0..0xd6: SSE/MMX arithmetic and shift group - not implemented yet. */
/* Opcode 0x0f 0xd0 - invalid */
/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0xd0 - invalid */
/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);

/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xd1 - invalid */
/* Opcode 0xf2 0x0f 0xd1 - invalid */

/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd2 - invalid */
/* Opcode 0xf2 0x0f 0xd2 - invalid */

/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd3 - invalid */
/* Opcode 0xf2 0x0f 0xd3 - invalid */

/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xd4 - invalid */
/* Opcode 0xf2 0x0f 0xd4 - invalid */

/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd5 - invalid */
/* Opcode 0xf2 0x0f 0xd5 - invalid */

/* Opcode 0x0f 0xd6 - invalid */
/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
#if 0
/* Disabled draft for the 0x0f 0xd6 group.  NOTE(review): the bodies are
   copied from the pmovmskb decoder below and still call the pmovmskb
   helpers; they need real movq/movq2dq/movdq2q implementations before this
   can be enabled.  Fixed a garbled IEMOP_MNEMONIC spelling (stray spaces
   inside the macro name) in the MMX case. */
FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
{
    /* Docs says register only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
            IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pDst, 0);
            IEM_MC_ARG(uint128_t const *, pSrc, 1);
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
            IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pDst, 0);
            IEM_MC_ARG(uint64_t const *, pSrc, 1);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
            IEM_MC_PREPARE_FPU_USAGE();
            IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
            IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
#endif
7643
7644
7645/** Opcode 0x0f 0xd7. */
7646FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7647{
7648 /* Docs says register only. */
7649 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7650 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7651 return IEMOP_RAISE_INVALID_OPCODE();
7652
7653 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
7654 /** @todo testcase: Check that the instruction implicitly clears the high
7655 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7656 * and opcode modifications are made to work with the whole width (not
7657 * just 128). */
7658 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7659 {
7660 case IEM_OP_PRF_SIZE_OP: /* SSE */
7661 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7662 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7663 IEM_MC_BEGIN(2, 0);
7664 IEM_MC_ARG(uint64_t *, pDst, 0);
7665 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7666 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7667 IEM_MC_PREPARE_SSE_USAGE();
7668 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7669 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7670 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7671 IEM_MC_ADVANCE_RIP();
7672 IEM_MC_END();
7673 return VINF_SUCCESS;
7674
7675 case 0: /* MMX */
7676 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7677 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7678 IEM_MC_BEGIN(2, 0);
7679 IEM_MC_ARG(uint64_t *, pDst, 0);
7680 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7681 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7682 IEM_MC_PREPARE_FPU_USAGE();
7683 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7684 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7685 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7686 IEM_MC_ADVANCE_RIP();
7687 IEM_MC_END();
7688 return VINF_SUCCESS;
7689
7690 default:
7691 return IEMOP_RAISE_INVALID_OPCODE();
7692 }
7693}
7694
7695
/* The FNIEMOP_STUB()s below declare not-yet-implemented handlers for the
   MMX/SSE packed-integer opcodes 0x0f 0xd8 through 0x0f 0xe6. */

/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_STUB(iemOp_pand_Pq_Qq);
/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */

/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */

/* Opcode 0x0f 0xe6 - invalid */
/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7801
7802
7803/** Opcode 0x0f 0xe7. */
7804FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7805{
7806 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7807 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7808 {
7809 /*
7810 * Register, memory.
7811 */
7812/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
7813 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7814 {
7815
7816 case IEM_OP_PRF_SIZE_OP: /* SSE */
7817 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7818 IEM_MC_BEGIN(0, 2);
7819 IEM_MC_LOCAL(uint128_t, uSrc);
7820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7821
7822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7824 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7825 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7826
7827 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7828 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7829
7830 IEM_MC_ADVANCE_RIP();
7831 IEM_MC_END();
7832 break;
7833
7834 case 0: /* MMX */
7835 IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7836 IEM_MC_BEGIN(0, 2);
7837 IEM_MC_LOCAL(uint64_t, uSrc);
7838 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7839
7840 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7842 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7843 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7844
7845 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7846 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7847
7848 IEM_MC_ADVANCE_RIP();
7849 IEM_MC_END();
7850 break;
7851
7852 default:
7853 return IEMOP_RAISE_INVALID_OPCODE();
7854 }
7855 }
7856 /* The register, register encoding is invalid. */
7857 else
7858 return IEMOP_RAISE_INVALID_OPCODE();
7859 return VINF_SUCCESS;
7860}
7861
7862
/* The FNIEMOP_STUB()s below declare not-yet-implemented handlers for the
   MMX/SSE packed-integer opcodes 0x0f 0xe8 through 0x0f 0xee. */

/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */

/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */

/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_STUB(iemOp_por_Pq_Qq);
/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */

/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */
7911
7912
/** Opcode 0x0f 0xef - pxor Pq,Qq (MMX) / 0x66 0x0f 0xef - pxor Vdq,Wdq (SSE2). */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    IEMOP_MNEMONIC(pxor, "pxor");
    /* Shared MMX/SSE2 full-width worker; it dispatches on the operand-size prefix. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */
7921
/* The FNIEMOP_STUB()s below declare not-yet-implemented handlers for the
   MMX/SSE packed-integer opcodes 0x0f 0xf0 through 0x0f 0xfe. */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xfe - invalid */
8010
8011
/** Opcode **** 0x0f 0xff - UD0 (guaranteed invalid opcode, always raises \#UD). */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        /* On the Intel path a ModR/M byte (and the effective-address bytes it
           implies) is consumed before raising \#UD; on other vendors the
           exception is raised on the opcode alone.
           NOTE(review): vendor behavior difference inferred from this branch
           structure - confirm against vendor documentation. */
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    /* Unconditionally raises the invalid-opcode exception. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
8029
8030
8031
/** Repeats a_fn four times. For decoding tables. */
#define IEMOP_X4(a_fn) a_fn, a_fn, a_fn, a_fn

/**
 * The two byte opcode (0x0f escape) decoder map.
 *
 * Four entries per opcode byte, selected by the active SIMD prefix:
 * no prefix, 0x66, 0xf3 and 0xf2 (hence 256 * 4 = 1024 entries).
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
    /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
    /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
    /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
    /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
    /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
    /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
    /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
    /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
    /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
    /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ IEMOP_X4(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib),
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */ IEMOP_X4(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq),
    /* 0x7f */ IEMOP_X4(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq),

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ IEMOP_X4(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq),
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
8312/** @} */
8313
8314
8315/** @name One byte opcodes.
8316 *
8317 * @{
8318 */
8319
/** Opcode 0x00 - add Eb,Gb. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
8326
8327
/** Opcode 0x01 - add Ev,Gv. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
8334
8335
/** Opcode 0x02 - add Gb,Eb. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
8342
8343
/** Opcode 0x03 - add Gv,Ev. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
8350
8351
8352/** Opcode 0x04. */
8353FNIEMOP_DEF(iemOp_add_Al_Ib)
8354{
8355 IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
8356 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
8357}
8358
8359
8360/** Opcode 0x05. */
8361FNIEMOP_DEF(iemOp_add_eAX_Iz)
8362{
8363 IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
8364 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
8365}
8366
8367
/** Opcode 0x06 - push es. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC(push_es, "push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07 - pop es (invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC(pop_es, "pop es");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Segment-register loads have side effects; defer to the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
8384
8385
/** Opcode 0x08 - or Eb,Gb: byte OR, reg/mem destination. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
    /* AF is undefined after logical ops; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}


/** Opcode 0x09 - or Ev,Gv: word/dword/qword OR, reg/mem destination. */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}


/** Opcode 0x0a - or Gb,Eb: byte OR, register destination. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b - or Gv,Ev: word/dword/qword OR, register destination. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c - or al,Ib: OR immediate byte into AL. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d - or rAX,Iz: OR immediate (z-sized) into rAX. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
8438
8439
/** Opcode 0x0e - push cs (only reachable pre-286; 0x0f is the two-byte escape). */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC(push_cs, "push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
8446
8447
/** Opcode 0x0f - two-byte opcode escape: fetches the second opcode byte and
 *  dispatches through g_apfnTwoByteMap (4 columns per opcode, selected by the
 *  mandatory-prefix index idxPrefix; the table holds 256 * 4 = 1024 entries). */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#ifdef VBOX_STRICT
    /* One-time sanity check that the prefix columns of the table line up as
       expected (0xbc: bsf vs tzcnt on the F3 column).
       NOTE(review): benign race on s_fTested under SMP - worst case the
       asserts run more than once. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);

    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
8470
/** Opcode 0x10 - adc Eb,Gb: byte add-with-carry, reg/mem destination. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11 - adc Ev,Gv: word/dword/qword add-with-carry, reg/mem destination. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12 - adc Gb,Eb: byte add-with-carry, register destination. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13 - adc Gv,Ev: word/dword/qword add-with-carry, register destination. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14 - adc al,Ib: add-with-carry immediate byte into AL. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15 - adc rAX,Iz: add-with-carry immediate (z-sized) into rAX. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
8517
8518
/** Opcode 0x16 - push ss. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC(push_ss, "push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17 - pop ss (invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
8535
8536
/** Opcode 0x18 - sbb Eb,Gb: byte subtract-with-borrow, reg/mem destination. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19 - sbb Ev,Gv: word/dword/qword subtract-with-borrow, reg/mem destination. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a - sbb Gb,Eb: byte subtract-with-borrow, register destination. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b - sbb Gv,Ev: word/dword/qword subtract-with-borrow, register destination. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c - sbb al,Ib: subtract-with-borrow immediate byte from AL. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d - sbb rAX,Iz: subtract-with-borrow immediate (z-sized) from rAX. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
8583
8584
/** Opcode 0x1e - push ds. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC(push_ds, "push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f - pop ds (invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC(pop_ds, "pop ds");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
8601
8602
/** Opcode 0x20 - and Eb,Gb: byte AND, reg/mem destination. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
    /* AF is undefined after logical ops; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21 - and Ev,Gv: word/dword/qword AND, reg/mem destination. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22 - and Gb,Eb: byte AND, register destination. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23 - and Gv,Ev: word/dword/qword AND, register destination. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24 - and al,Ib: AND immediate byte into AL. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25 - and rAX,Iz: AND immediate (z-sized) into rAX. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
8655
8656
/** Opcode 0x26 - ES segment override prefix: records the prefix and
 *  continues decoding the following opcode byte. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    /* Fetch and dispatch the instruction that follows the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8667
8668
/** Opcode 0x27 - daa: decimal adjust AL after addition (invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC(daa_AL, "daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is undefined after DAA; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
8678
8679
/** Opcode 0x28 - sub Eb,Gb: byte SUB, reg/mem destination. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29 - sub Ev,Gv: word/dword/qword SUB, reg/mem destination. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a - sub Gb,Eb: byte SUB, register destination. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b - sub Gv,Ev: word/dword/qword SUB, register destination. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c - sub al,Ib: SUB immediate byte from AL. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d - sub rAX,Iz: SUB immediate (z-sized) from rAX. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
8726
8727
/** Opcode 0x2e - CS segment override prefix: records the prefix and
 *  continues decoding the following opcode byte. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    /* Fetch and dispatch the instruction that follows the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8738
8739
/** Opcode 0x2f - das: decimal adjust AL after subtraction (invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC(das_AL, "das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is undefined after DAS; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
8749
8750
/** Opcode 0x30 - xor Eb,Gb: byte XOR, reg/mem destination. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
    /* AF is undefined after logical ops; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31 - xor Ev,Gv: word/dword/qword XOR, reg/mem destination. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32 - xor Gb,Eb: byte XOR, register destination. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33 - xor Gv,Ev: word/dword/qword XOR, register destination. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34 - xor al,Ib: XOR immediate byte into AL. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35 - xor rAX,Iz: XOR immediate (z-sized) into rAX. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
8803
8804
/** Opcode 0x36 - SS segment override prefix: records the prefix and
 *  continues decoding the following opcode byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    /* Fetch and dispatch the instruction that follows the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8815
8816
/** Opcode 0x37 - aaa: not implemented yet, declared as a stub. */
FNIEMOP_STUB(iemOp_aaa);
8819
8820
/** Opcode 0x38 - cmp Eb,Gb: byte compare (SUB that only updates flags). */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39 - cmp Ev,Gv: word/dword/qword compare. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a - cmp Gb,Eb: byte compare, register first operand. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b - cmp Gv,Ev: word/dword/qword compare, register first operand. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c - cmp al,Ib: compare AL with immediate byte. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d - cmp rAX,Iz: compare rAX with immediate (z-sized). */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
8867
8868
/** Opcode 0x3e - DS segment override prefix: records the prefix and
 *  continues decoding the following opcode byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    /* Fetch and dispatch the instruction that follows the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8879
8880
/** Opcode 0x3f - aas: not implemented yet, declared as a stub. */
FNIEMOP_STUB(iemOp_aas);
8883
8884/**
8885 * Common 'inc/dec/not/neg register' helper.
8886 */
8887FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
8888{
8889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8890 switch (pVCpu->iem.s.enmEffOpSize)
8891 {
8892 case IEMMODE_16BIT:
8893 IEM_MC_BEGIN(2, 0);
8894 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8895 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8896 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8897 IEM_MC_REF_EFLAGS(pEFlags);
8898 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
8899 IEM_MC_ADVANCE_RIP();
8900 IEM_MC_END();
8901 return VINF_SUCCESS;
8902
8903 case IEMMODE_32BIT:
8904 IEM_MC_BEGIN(2, 0);
8905 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8906 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8907 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8908 IEM_MC_REF_EFLAGS(pEFlags);
8909 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
8910 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8911 IEM_MC_ADVANCE_RIP();
8912 IEM_MC_END();
8913 return VINF_SUCCESS;
8914
8915 case IEMMODE_64BIT:
8916 IEM_MC_BEGIN(2, 0);
8917 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8918 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8919 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8920 IEM_MC_REF_EFLAGS(pEFlags);
8921 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
8922 IEM_MC_ADVANCE_RIP();
8923 IEM_MC_END();
8924 return VINF_SUCCESS;
8925 }
8926 return VINF_SUCCESS;
8927}
8928
8929
/** Opcode 0x40 - inc eAX in 16/32-bit mode; bare REX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        /* Continue decoding the instruction that follows the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41 - inc eCX in 16/32-bit mode; REX.B prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        /* Bit 3 of the register index; OR-ed into r/m register numbers later. */
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42 - inc eDX in 16/32-bit mode; REX.X prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        /* Bit 3 of the SIB index register number. */
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43 - inc eBX in 16/32-bit mode; REX.BX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
9013
9014
/** Opcode 0x44 - inc eSP in 16/32-bit mode; REX.R prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        /* Bit 3 of the ModR/M reg field register number. */
        pVCpu->iem.s.uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45 - inc eBP in 16/32-bit mode; REX.RB prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46 - inc eSI in 16/32-bit mode; REX.RX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47 - inc eDI in 16/32-bit mode; REX.RBX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
9101
9102
/** Opcode 0x48 - dec eAX in 16/32-bit mode; REX.W prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        /* REX.W affects the operand size; recalculate it now. */
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49 - dec eCX in 16/32-bit mode; REX.BW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a - dec eDX in 16/32-bit mode; REX.XW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b - dec eBX in 16/32-bit mode; REX.BXW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
9189
9190
/** Opcode 0x4c - dec eSP in 16/32-bit mode; REX.RW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        /* REX.W affects the operand size; recalculate it now. */
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d - dec eBP in 16/32-bit mode; REX.RBW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e - dec eSI in 16/32-bit mode; REX.RXW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f - dec eDI in 16/32-bit mode; REX.RBXW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
9281
9282
9283/**
9284 * Common 'push register' helper.
9285 */
9286FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
9287{
9288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9289 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9290 {
9291 iReg |= pVCpu->iem.s.uRexB;
9292 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9293 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9294 }
9295
9296 switch (pVCpu->iem.s.enmEffOpSize)
9297 {
9298 case IEMMODE_16BIT:
9299 IEM_MC_BEGIN(0, 1);
9300 IEM_MC_LOCAL(uint16_t, u16Value);
9301 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
9302 IEM_MC_PUSH_U16(u16Value);
9303 IEM_MC_ADVANCE_RIP();
9304 IEM_MC_END();
9305 break;
9306
9307 case IEMMODE_32BIT:
9308 IEM_MC_BEGIN(0, 1);
9309 IEM_MC_LOCAL(uint32_t, u32Value);
9310 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
9311 IEM_MC_PUSH_U32(u32Value);
9312 IEM_MC_ADVANCE_RIP();
9313 IEM_MC_END();
9314 break;
9315
9316 case IEMMODE_64BIT:
9317 IEM_MC_BEGIN(0, 1);
9318 IEM_MC_LOCAL(uint64_t, u64Value);
9319 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
9320 IEM_MC_PUSH_U64(u64Value);
9321 IEM_MC_ADVANCE_RIP();
9322 IEM_MC_END();
9323 break;
9324 }
9325
9326 return VINF_SUCCESS;
9327}
9328
9329
/** Opcode 0x50 - push rAX (rR8 with REX.B). */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/** Opcode 0x51 - push rCX (rR9 with REX.B). */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/** Opcode 0x52 - push rDX (rR10 with REX.B). */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/** Opcode 0x53 - push rBX (rR11 with REX.B). */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}


/** Opcode 0x54 - push rSP (rR12 with REX.B); special-cased on 8086. */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        /* The 8086 pushes the decremented SP value (SP - 2) rather than the
           original one, hence the explicit subtract before the push.
           NOTE(review): this path appears to rely on the MC block exiting the
           function (via IEM_MC_ADVANCE_RIP/IEM_MC_END) so the common push
           below is not executed a second time - confirm against the MC macro
           definitions. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}


/** Opcode 0x55 - push rBP (rR13 with REX.B). */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}


/** Opcode 0x56 - push rSI (rR14 with REX.B). */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}


/** Opcode 0x57 - push rDI (rR15 with REX.B). */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
9402
9403
9404/**
9405 * Common 'pop register' helper.
9406 */
9407FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
9408{
9409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9410 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9411 {
9412 iReg |= pVCpu->iem.s.uRexB;
9413 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9414 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9415 }
9416
9417 switch (pVCpu->iem.s.enmEffOpSize)
9418 {
9419 case IEMMODE_16BIT:
9420 IEM_MC_BEGIN(0, 1);
9421 IEM_MC_LOCAL(uint16_t *, pu16Dst);
9422 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
9423 IEM_MC_POP_U16(pu16Dst);
9424 IEM_MC_ADVANCE_RIP();
9425 IEM_MC_END();
9426 break;
9427
9428 case IEMMODE_32BIT:
9429 IEM_MC_BEGIN(0, 1);
9430 IEM_MC_LOCAL(uint32_t *, pu32Dst);
9431 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9432 IEM_MC_POP_U32(pu32Dst);
9433 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
9434 IEM_MC_ADVANCE_RIP();
9435 IEM_MC_END();
9436 break;
9437
9438 case IEMMODE_64BIT:
9439 IEM_MC_BEGIN(0, 1);
9440 IEM_MC_LOCAL(uint64_t *, pu64Dst);
9441 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9442 IEM_MC_POP_U64(pu64Dst);
9443 IEM_MC_ADVANCE_RIP();
9444 IEM_MC_END();
9445 break;
9446 }
9447
9448 return VINF_SUCCESS;
9449}
9450
9451
/** Opcode 0x58 - pop rAX (rR8 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}


/** Opcode 0x59 - pop rCX (rR9 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}


/** Opcode 0x5a - pop rDX (rR10 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}


/** Opcode 0x5b - pop rBX (rR11 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
9482
9483
/** Opcode 0x5c - pop rSP: special-cased because the destination register is
 *  the stack pointer itself, so the value is popped into a local first and
 *  then stored (pop-into-reference would be clobbered by the SP update). */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this is pop r12, which the common helper handles fine. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
9531
9532
/** Opcode 0x5d - pop rBP. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* Defer to the common POP general-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
9539
9540
/** Opcode 0x5e - pop rSI. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* Defer to the common POP general-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
9547
9548
/** Opcode 0x5f - pop rDI. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* Defer to the common POP general-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
9555
9556
/** Opcode 0x60 - pusha/pushad. Requires 186+, invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    /* Only 16-bit and 32-bit operand sizes are possible here. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
9568
9569
/** Opcode 0x61 - popa/popad. Requires 186+, invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC(popa, "popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    /* Only 16-bit and 32-bit operand sizes are possible here. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
9581
9582
/** Opcode 0x62 - bound Gv,Ma (and, per the name, the EVEX prefix escape);
 *  not implemented yet (stub). */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
// IEMOP_HLP_MIN_186();
9586
9587
/** Opcode 0x63 - non-64-bit modes: arpl Ew,Gw.
 *
 * Adjust-RPL instruction; requires 286+ and protected mode (real and V86
 * modes are rejected by IEMOP_HLP_NO_REAL_OR_V86_MODE). The actual flag and
 * RPL adjustment logic lives in the iemAImpl_arpl assembly worker.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: reference the register directly. */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map the word read/write and commit afterwards. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,                    0);
        IEM_MC_ARG(uint16_t,   u16Src,                     1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,           2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
9637
9638
/** Opcode 0x63 - 64-bit mode: movsxd Gv,Ev (sign-extend dword to qword).
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register: fetch dword sign-extended to qword, store.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9680
9681
9682/** Opcode 0x64. */
9683FNIEMOP_DEF(iemOp_seg_FS)
9684{
9685 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
9686 IEMOP_HLP_MIN_386();
9687
9688 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
9689 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
9690
9691 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9692 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9693}
9694
9695
9696/** Opcode 0x65. */
9697FNIEMOP_DEF(iemOp_seg_GS)
9698{
9699 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
9700 IEMOP_HLP_MIN_386();
9701
9702 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
9703 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
9704
9705 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9706 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9707}
9708
9709
/** Opcode 0x66 - operand-size override prefix (386+).
 *
 * Records the prefix, recalculates the effective operand size, and then
 * decodes the next opcode byte via the one-byte opcode table.
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present (idxPrefix != 0 already). */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9727
9728
9729/** Opcode 0x67. */
9730FNIEMOP_DEF(iemOp_addr_size)
9731{
9732 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
9733 IEMOP_HLP_MIN_386();
9734
9735 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
9736 switch (pVCpu->iem.s.enmDefAddrMode)
9737 {
9738 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9739 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
9740 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9741 default: AssertFailed();
9742 }
9743
9744 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9745 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9746}
9747
9748
/** Opcode 0x68 - push Iz (186+; default 64-bit operand size in long mode).
 *
 * Pushes a word/dword immediate; in 64-bit mode the immediate is a dword
 * sign-extended to a qword (see IEM_OPCODE_GET_NEXT_S32_SX_U64).
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9793
9794
/** Opcode 0x69 - imul Gv,Ev,Iz (three-operand signed multiply, 186+).
 *
 * Gv = Ev * Iz. The multiply is done via a temporary local so the result
 * store honours the usual 32-bit zero-extension rules via the STORE_GREG
 * macros. SF/ZF/AF/PF are undefined after IMUL (see the verification hint).
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; the '2' is the immediate size still to be
                   fetched after the effective address bytes. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 4 immediate bytes follow the ModR/M bytes. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; immediate is a dword sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 4 immediate bytes (sign-extended) follow. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
9954
9955
/** Opcode 0x6a - push Ib (186+; default 64-bit operand size in long mode).
 *
 * Pushes a sign-extended byte immediate with the current operand size.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9982
9983
/** Opcode 0x6b - imul Gv,Ev,Ib (three-operand signed multiply, 186+).
 *
 * Gv = Ev * Ib, with the byte immediate sign-extended to the operand size.
 * SF/ZF/AF/PF are undefined after IMUL (see the verification hint).
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; (int8_t) cast sign-extends the immediate. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 1 immediate byte follows the ModR/M bytes. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG(uint16_t,        u16Src,                     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; (int8_t) cast sign-extends the immediate. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 1 immediate byte follows the ModR/M bytes. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG(uint32_t,        u32Src,                     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; (int8_t) cast sign-extends the immediate. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 1 immediate byte follows the ModR/M bytes. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG(uint64_t,        u64Src,                     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
10137
10138
/** Opcode 0x6c - ins Yb,DX (byte input string, 186+).
 *
 * Dispatches to a C implementation worker selected by the presence of a
 * REP/REPNE prefix and by the effective address size.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10167
10168
/** Opcode 0x6d - ins Yv,DX (word/dword input string, 186+).
 *
 * Dispatches to a C implementation worker selected by REP prefix, effective
 * operand size (64-bit folds onto the 32-bit op workers) and address size.
 * Every inner case returns, so the break statements are never reached.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* I/O port ops cap at 32-bit operands. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* I/O port ops cap at 32-bit operands. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10229
10230
/** Opcode 0x6e - outs DX,Yb (byte output string, 186+).
 *
 * Dispatches to a C implementation worker selected by the presence of a
 * REP/REPNE prefix and by the effective address size; the effective data
 * segment is passed along since OUTS reads from DS:rSI (overridable).
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10259
10260
/** Opcode 0x6f - outs DX,Yv (word/dword output string, 186+).
 *
 * Dispatches to a C implementation worker selected by REP prefix, effective
 * operand size (64-bit folds onto the 32-bit op workers) and address size.
 * Every inner case returns, so the break statements are never reached.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* I/O port ops cap at 32-bit operands. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* I/O port ops cap at 32-bit operands. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10321
10322
/** Opcode 0x70 - jo Jb: short jump if OF=1. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10340
10341
/** Opcode 0x71 - jno Jb: short jump if OF=0 (branches inverted vs jo). */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10359
/** Opcode 0x72 - jc/jb/jnae Jb: short jump if CF=1. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10377
10378
/** Opcode 0x73 - jnc/jnb/jae Jb: short jump if CF=0. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10396
10397
/** Opcode 0x74 - je/jz Jb: short jump if ZF=1. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10415
10416
/** Opcode 0x75 - jne/jnz Jb: short jump if ZF=0. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10434
10435
/** Opcode 0x76 - jbe/jna Jb: short jump if CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10453
10454
/** Opcode 0x77 - ja/jnbe Jb: short jump if CF=0 and ZF=0. */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10472
10473
/** Opcode 0x78 - js Jb: short jump if SF=1. */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10491
10492
/** Opcode 0x79 - jns Jb: short jump if SF=0. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10510
10511
10512/** Opcode 0x7a - jp/jpe Jb: jump short if parity even (PF=1). */
10513FNIEMOP_DEF(iemOp_jp_Jb)
10514{
10515 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
10516 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
10517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10518 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10519
10520 IEM_MC_BEGIN(0, 0);
10521 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10522 IEM_MC_REL_JMP_S8(i8Imm);
10523 } IEM_MC_ELSE() {
10524 IEM_MC_ADVANCE_RIP();
10525 } IEM_MC_ENDIF();
10526 IEM_MC_END();
10527 return VINF_SUCCESS;
10528}
10529
10530
10531/** Opcode 0x7b - jnp/jpo Jb: jump short if parity odd (PF=0). */
10532FNIEMOP_DEF(iemOp_jnp_Jb)
10533{
10534 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
10535 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
10536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10537 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10538
10539 IEM_MC_BEGIN(0, 0);
10540 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10541 IEM_MC_ADVANCE_RIP(); /* inverted sense: PF set means don't jump */
10542 } IEM_MC_ELSE() {
10543 IEM_MC_REL_JMP_S8(i8Imm);
10544 } IEM_MC_ENDIF();
10545 IEM_MC_END();
10546 return VINF_SUCCESS;
10547}
10548
10549
10550/** Opcode 0x7c - jl/jnge Jb: jump short if less (SF != OF). */
10551FNIEMOP_DEF(iemOp_jl_Jb)
10552{
10553 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
10554 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
10555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10556 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10557
10558 IEM_MC_BEGIN(0, 0);
10559 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
10560 IEM_MC_REL_JMP_S8(i8Imm);
10561 } IEM_MC_ELSE() {
10562 IEM_MC_ADVANCE_RIP();
10563 } IEM_MC_ENDIF();
10564 IEM_MC_END();
10565 return VINF_SUCCESS;
10566}
10567
10568
10569/** Opcode 0x7d - jnl/jge Jb: jump short if greater or equal (SF == OF). */
10570FNIEMOP_DEF(iemOp_jnl_Jb)
10571{
10572 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
10573 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
10574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10575 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10576
10577 IEM_MC_BEGIN(0, 0);
10578 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
10579 IEM_MC_ADVANCE_RIP(); /* inverted sense: SF != OF means less, so don't jump */
10580 } IEM_MC_ELSE() {
10581 IEM_MC_REL_JMP_S8(i8Imm);
10582 } IEM_MC_ENDIF();
10583 IEM_MC_END();
10584 return VINF_SUCCESS;
10585}
10586
10587
10588/** Opcode 0x7e - jle/jng Jb: jump short if less or equal (ZF=1 or SF != OF). */
10589FNIEMOP_DEF(iemOp_jle_Jb)
10590{
10591 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
10592 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
10593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10594 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10595
10596 IEM_MC_BEGIN(0, 0);
10597 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
10598 IEM_MC_REL_JMP_S8(i8Imm);
10599 } IEM_MC_ELSE() {
10600 IEM_MC_ADVANCE_RIP();
10601 } IEM_MC_ENDIF();
10602 IEM_MC_END();
10603 return VINF_SUCCESS;
10604}
10605
10606
10607/** Opcode 0x7f - jg/jnle Jb: jump short if greater (ZF=0 and SF == OF). */
10608FNIEMOP_DEF(iemOp_jnle_Jb)
10609{
10610 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
10611 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
10612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10613 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10614
10615 IEM_MC_BEGIN(0, 0);
10616 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
10617 IEM_MC_ADVANCE_RIP(); /* inverted sense: ZF or SF!=OF means not-greater */
10618 } IEM_MC_ELSE() {
10619 IEM_MC_REL_JMP_S8(i8Imm);
10620 } IEM_MC_ENDIF();
10621 IEM_MC_END();
10622 return VINF_SUCCESS;
10623}
10624
10625
10626/**
 * Opcode 0x80 - Group 1: add/or/adc/sbb/and/sub/xor/cmp Eb,Ib.
 * The actual operation is selected by ModRM.reg via g_apIemImplGrp1.
 */
10627FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
10628{
10629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10630 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10631 {
10632 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
10633 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
10634 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
10635 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
10636 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
10637 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
10638 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
10639 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
10640 }
10641 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK]; /* worker table indexed by ModRM.reg */
10642
10643 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10644 {
10645 /* register target */
10646 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is invalid with a register destination */
10648 IEM_MC_BEGIN(3, 0);
10649 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10650 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
10651 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10652
10653 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10654 IEM_MC_REF_EFLAGS(pEFlags);
10655 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
10656
10657 IEM_MC_ADVANCE_RIP();
10658 IEM_MC_END();
10659 }
10660 else
10661 {
10662 /* memory target */
10663 uint32_t fAccess;
10664 if (pImpl->pfnLockedU8)
10665 fAccess = IEM_ACCESS_DATA_RW;
10666 else /* CMP */
10667 fAccess = IEM_ACCESS_DATA_R; /* CMP only reads the destination */
10668 IEM_MC_BEGIN(3, 2);
10669 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10670 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10671 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10672
10673 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to be fetched */
10674 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10675 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
10676 if (pImpl->pfnLockedU8)
10677 IEMOP_HLP_DONE_DECODING(); /* LOCK allowed for RMW forms */
10678 else
10679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10680
10681 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10682 IEM_MC_FETCH_EFLAGS(EFlags);
10683 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10684 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
10685 else
10686 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
10687
10688 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
10689 IEM_MC_COMMIT_EFLAGS(EFlags);
10690 IEM_MC_ADVANCE_RIP();
10691 IEM_MC_END();
10692 }
10693 return VINF_SUCCESS;
10694}
10695
10696
10697/**
 * Opcode 0x81 - Group 1: add/or/adc/sbb/and/sub/xor/cmp Ev,Iz.
 * ModRM.reg selects the operation; the immediate is operand-size wide
 * (16/32-bit, or imm32 sign-extended to 64-bit).
 */
10698FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
10699{
10700 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10701 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10702 {
10703 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
10704 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
10705 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
10706 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
10707 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
10708 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
10709 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
10710 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
10711 }
10712 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK]; /* worker table indexed by ModRM.reg */
10713
10714 switch (pVCpu->iem.s.enmEffOpSize)
10715 {
10716 case IEMMODE_16BIT:
10717 {
10718 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10719 {
10719 /* register target */
10721 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK invalid with a register destination */
10723 IEM_MC_BEGIN(3, 0);
10724 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10725 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
10726 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10727
10728 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10729 IEM_MC_REF_EFLAGS(pEFlags);
10730 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10731
10732 IEM_MC_ADVANCE_RIP();
10733 IEM_MC_END();
10734 }
10735 else
10736 {
10737 /* memory target */
10738 uint32_t fAccess;
10739 if (pImpl->pfnLockedU16)
10740 fAccess = IEM_ACCESS_DATA_RW;
10741 else /* CMP, TEST */
10742 fAccess = IEM_ACCESS_DATA_R; /* read-only destination */
10743 IEM_MC_BEGIN(3, 2);
10744 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10745 IEM_MC_ARG(uint16_t, u16Src, 1);
10746 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10748
10749 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = imm16 still to be fetched */
10750 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10751 IEM_MC_ASSIGN(u16Src, u16Imm);
10752 if (pImpl->pfnLockedU16)
10753 IEMOP_HLP_DONE_DECODING(); /* LOCK allowed for RMW forms */
10754 else
10755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10756 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10757 IEM_MC_FETCH_EFLAGS(EFlags);
10758 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10759 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10760 else
10761 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10762
10763 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10764 IEM_MC_COMMIT_EFLAGS(EFlags);
10765 IEM_MC_ADVANCE_RIP();
10766 IEM_MC_END();
10767 }
10768 break;
10769 }
10770
10771 case IEMMODE_32BIT:
10772 {
10773 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10774 {
10775 /* register target */
10776 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10778 IEM_MC_BEGIN(3, 0);
10779 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10780 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
10781 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10782
10783 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10784 IEM_MC_REF_EFLAGS(pEFlags);
10785 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10786 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit register write zeroes bits 63:32 */
10787
10788 IEM_MC_ADVANCE_RIP();
10789 IEM_MC_END();
10790 }
10791 else
10792 {
10793 /* memory target */
10794 uint32_t fAccess;
10795 if (pImpl->pfnLockedU32)
10796 fAccess = IEM_ACCESS_DATA_RW;
10797 else /* CMP, TEST */
10798 fAccess = IEM_ACCESS_DATA_R;
10799 IEM_MC_BEGIN(3, 2);
10800 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10801 IEM_MC_ARG(uint32_t, u32Src, 1);
10802 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10803 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10804
10805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = imm32 still to be fetched */
10806 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10807 IEM_MC_ASSIGN(u32Src, u32Imm);
10808 if (pImpl->pfnLockedU32)
10809 IEMOP_HLP_DONE_DECODING();
10810 else
10811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10812 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10813 IEM_MC_FETCH_EFLAGS(EFlags);
10814 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10815 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10816 else
10817 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10818
10819 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10820 IEM_MC_COMMIT_EFLAGS(EFlags);
10821 IEM_MC_ADVANCE_RIP();
10822 IEM_MC_END();
10823 }
10824 break;
10825 }
10826
10827 case IEMMODE_64BIT:
10828 {
10829 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10830 {
10831 /* register target */
10832 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
10833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10834 IEM_MC_BEGIN(3, 0);
10835 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10836 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
10837 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10838
10839 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10840 IEM_MC_REF_EFLAGS(pEFlags);
10841 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10842
10843 IEM_MC_ADVANCE_RIP();
10844 IEM_MC_END();
10845 }
10846 else
10847 {
10848 /* memory target */
10849 uint32_t fAccess;
10850 if (pImpl->pfnLockedU64)
10851 fAccess = IEM_ACCESS_DATA_RW;
10852 else /* CMP */
10853 fAccess = IEM_ACCESS_DATA_R;
10854 IEM_MC_BEGIN(3, 2);
10855 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10856 IEM_MC_ARG(uint64_t, u64Src, 1);
10857 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10858 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10859
10860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = imm32 still to be fetched */
10861 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10862 if (pImpl->pfnLockedU64)
10863 IEMOP_HLP_DONE_DECODING();
10864 else
10865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10866 IEM_MC_ASSIGN(u64Src, u64Imm); /* NOTE(review): assignment ordered after decoding here, before it in the 16/32-bit cases */
10867 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10868 IEM_MC_FETCH_EFLAGS(EFlags);
10869 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10870 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10871 else
10872 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10873
10874 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10875 IEM_MC_COMMIT_EFLAGS(EFlags);
10876 IEM_MC_ADVANCE_RIP();
10877 IEM_MC_END();
10878 }
10879 break;
10880 }
10881 }
10882 return VINF_SUCCESS;
10883}
10884
10885
10886/** Opcode 0x82 - alias of 0x80 (Group 1 Eb,Ib); invalid in 64-bit mode. */
10887FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
10888{
10889 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
10890 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80); /* same behavior as 0x80 outside long mode */
10891}
10892
10893
10894/**
 * Opcode 0x83 - Group 1: add/or/adc/sbb/and/sub/xor/cmp Ev,Ib.
 * ModRM.reg selects the operation; the imm8 is sign-extended to the
 * effective operand size before the operation.
 */
10895FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
10896{
10897 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10898 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10899 {
10900 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
10901 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
10902 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
10903 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
10904 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
10905 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
10906 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
10907 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
10908 }
10909 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
10910 to the 386 even if absent in the intel reference manuals and some
10911 3rd party opcode listings. */
10912 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK]; /* worker table indexed by ModRM.reg */
10913
10914 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10915 {
10916 /*
10917 * Register target - the single imm8 is fetched once, then sign-extended
10917 * per operand size below.
10918 */
10919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK invalid with a register destination */
10920 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10921 switch (pVCpu->iem.s.enmEffOpSize)
10922 {
10923 case IEMMODE_16BIT:
10924 {
10925 IEM_MC_BEGIN(3, 0);
10926 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10927 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 -> 16 bits */
10928 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10929
10930 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10931 IEM_MC_REF_EFLAGS(pEFlags);
10932 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10933
10934 IEM_MC_ADVANCE_RIP();
10935 IEM_MC_END();
10936 break;
10937 }
10938
10939 case IEMMODE_32BIT:
10940 {
10941 IEM_MC_BEGIN(3, 0);
10942 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10943 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 -> 32 bits */
10944 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10945
10946 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10947 IEM_MC_REF_EFLAGS(pEFlags);
10948 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10949 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit register write zeroes bits 63:32 */
10950
10951 IEM_MC_ADVANCE_RIP();
10952 IEM_MC_END();
10953 break;
10954 }
10955
10956 case IEMMODE_64BIT:
10957 {
10958 IEM_MC_BEGIN(3, 0);
10959 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10960 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 -> 64 bits */
10961 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10962
10963 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10964 IEM_MC_REF_EFLAGS(pEFlags);
10965 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10966
10967 IEM_MC_ADVANCE_RIP();
10968 IEM_MC_END();
10969 break;
10970 }
10971 }
10972 }
10973 else
10974 {
10975 /*
10976 * Memory target.
10977 */
10978 uint32_t fAccess;
10979 if (pImpl->pfnLockedU16)
10980 fAccess = IEM_ACCESS_DATA_RW;
10981 else /* CMP */
10982 fAccess = IEM_ACCESS_DATA_R; /* CMP only reads the destination */
10983
10984 switch (pVCpu->iem.s.enmEffOpSize)
10985 {
10986 case IEMMODE_16BIT:
10987 {
10988 IEM_MC_BEGIN(3, 2);
10989 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10990 IEM_MC_ARG(uint16_t, u16Src, 1);
10991 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10992 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10993
10994 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 still to be fetched */
10995 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10996 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); /* sign-extend imm8 -> 16 bits */
10997 if (pImpl->pfnLockedU16)
10998 IEMOP_HLP_DONE_DECODING(); /* LOCK allowed for RMW forms */
10999 else
11000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11001 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11002 IEM_MC_FETCH_EFLAGS(EFlags);
11003 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11004 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11005 else
11006 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
11007
11008 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
11009 IEM_MC_COMMIT_EFLAGS(EFlags);
11010 IEM_MC_ADVANCE_RIP();
11011 IEM_MC_END();
11012 break;
11013 }
11014
11015 case IEMMODE_32BIT:
11016 {
11017 IEM_MC_BEGIN(3, 2);
11018 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11019 IEM_MC_ARG(uint32_t, u32Src, 1);
11020 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11022
11023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
11024 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11025 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); /* sign-extend imm8 -> 32 bits */
11026 if (pImpl->pfnLockedU32)
11027 IEMOP_HLP_DONE_DECODING();
11028 else
11029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11030 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11031 IEM_MC_FETCH_EFLAGS(EFlags);
11032 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11033 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11034 else
11035 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
11036
11037 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
11038 IEM_MC_COMMIT_EFLAGS(EFlags);
11039 IEM_MC_ADVANCE_RIP();
11040 IEM_MC_END();
11041 break;
11042 }
11043
11044 case IEMMODE_64BIT:
11045 {
11046 IEM_MC_BEGIN(3, 2);
11047 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11048 IEM_MC_ARG(uint64_t, u64Src, 1);
11049 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11051
11052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
11053 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11054 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); /* sign-extend imm8 -> 64 bits */
11055 if (pImpl->pfnLockedU64)
11056 IEMOP_HLP_DONE_DECODING();
11057 else
11058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11059 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11060 IEM_MC_FETCH_EFLAGS(EFlags);
11061 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11062 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11063 else
11064 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
11065
11066 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
11067 IEM_MC_COMMIT_EFLAGS(EFlags);
11068 IEM_MC_ADVANCE_RIP();
11069 IEM_MC_END();
11070 break;
11071 }
11072 }
11073 }
11074 return VINF_SUCCESS;
11075}
11076
11077
11078/** Opcode 0x84 - test Eb,Gb: AND without storing the result, flags only. */
11079FNIEMOP_DEF(iemOp_test_Eb_Gb)
11080{
11081 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
11082 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */
11083 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test); /* shared rm,r8 binary-op worker */
11084}
11085
11086
11087/** Opcode 0x85 - test Ev,Gv: AND without storing the result, flags only. */
11088FNIEMOP_DEF(iemOp_test_Ev_Gv)
11089{
11090 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
11091 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */
11092 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test); /* shared rm,rv binary-op worker */
11093}
11094
11095
11096/**
 * Opcode 0x86 - xchg Eb,Gb: exchange a byte register with r/m8.
 * The memory form is always performed via the RW-mapped xchg worker.
 */
11097FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
11098{
11099 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11100 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
11101
11102 /*
11103 * If rm is denoting a register, no more instruction bytes.
11104 */
11105 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11106 {
11107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11108
11109 IEM_MC_BEGIN(0, 2);
11110 IEM_MC_LOCAL(uint8_t, uTmp1);
11111 IEM_MC_LOCAL(uint8_t, uTmp2);
11112
11113 /* Fetch both, store crosswise. */
11114 IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11114 IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11115 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11116 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11117
11118 IEM_MC_ADVANCE_RIP();
11119 IEM_MC_END();
11120 }
11121 else
11122 {
11123 /*
11124 * We're accessing memory.
11125 */
11126/** @todo the register must be committed separately! */
11127 IEM_MC_BEGIN(2, 2);
11128 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
11129 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11131
11132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11133 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11134 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11135 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg); /* worker swaps *pu8Mem and *pu8Reg */
11136 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
11137
11138 IEM_MC_ADVANCE_RIP();
11139 IEM_MC_END();
11140 }
11141 return VINF_SUCCESS;
11142}
11143
11144
11145/**
 * Opcode 0x87 - xchg Ev,Gv: exchange a general register with r/m16/32/64.
 * Register-register form swaps via two locals; memory form maps the
 * destination RW and calls the xchg worker.
 */
11146FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
11147{
11148 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
11149 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11150
11151 /*
11152 * If rm is denoting a register, no more instruction bytes.
11153 */
11154 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11155 {
11156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11157
11158 switch (pVCpu->iem.s.enmEffOpSize)
11159 {
11160 case IEMMODE_16BIT:
11161 IEM_MC_BEGIN(0, 2);
11162 IEM_MC_LOCAL(uint16_t, uTmp1);
11163 IEM_MC_LOCAL(uint16_t, uTmp2);
11164
11165 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11166 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11167 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11168 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11169
11170 IEM_MC_ADVANCE_RIP();
11171 IEM_MC_END();
11172 return VINF_SUCCESS;
11173
11174 case IEMMODE_32BIT:
11175 IEM_MC_BEGIN(0, 2);
11176 IEM_MC_LOCAL(uint32_t, uTmp1);
11177 IEM_MC_LOCAL(uint32_t, uTmp2);
11178
11179 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11180 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11181 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11182 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11183
11184 IEM_MC_ADVANCE_RIP();
11185 IEM_MC_END();
11186 return VINF_SUCCESS;
11187
11188 case IEMMODE_64BIT:
11189 IEM_MC_BEGIN(0, 2);
11190 IEM_MC_LOCAL(uint64_t, uTmp1);
11191 IEM_MC_LOCAL(uint64_t, uTmp2);
11192
11193 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11194 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11195 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11196 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11197
11198 IEM_MC_ADVANCE_RIP();
11199 IEM_MC_END();
11200 return VINF_SUCCESS;
11201
11202 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* guards against bogus enmEffOpSize */
11203 }
11204 }
11205 else
11206 {
11207 /*
11208 * We're accessing memory.
11209 */
11210 switch (pVCpu->iem.s.enmEffOpSize)
11211 {
11212/** @todo the register must be committed separately! */
11213 case IEMMODE_16BIT:
11214 IEM_MC_BEGIN(2, 2);
11215 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
11216 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11217 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11218
11219 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11220 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11221 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11222 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg); /* worker swaps the two values */
11223 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
11224
11225 IEM_MC_ADVANCE_RIP();
11226 IEM_MC_END();
11227 return VINF_SUCCESS;
11228
11229 case IEMMODE_32BIT:
11230 IEM_MC_BEGIN(2, 2);
11231 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
11232 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11233 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11234
11235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11236 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11237 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11238 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
11239 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
11240
11241 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg); /* 32-bit register write zeroes bits 63:32 */
11242 IEM_MC_ADVANCE_RIP();
11243 IEM_MC_END();
11244 return VINF_SUCCESS;
11245
11246 case IEMMODE_64BIT:
11247 IEM_MC_BEGIN(2, 2);
11248 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
11249 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11251
11252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11253 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11254 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11255 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
11256 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
11257
11258 IEM_MC_ADVANCE_RIP();
11259 IEM_MC_END();
11260 return VINF_SUCCESS;
11261
11262 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11263 }
11264 }
11265}
11266
11267
11268/** Opcode 0x88 - mov Eb,Gb: store a byte register into r/m8. */
11269FNIEMOP_DEF(iemOp_mov_Eb_Gb)
11270{
11271 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
11272
11273 uint8_t bRm;
11274 IEM_OPCODE_GET_NEXT_U8(&bRm);
11275
11276 /*
11277 * If rm is denoting a register, no more instruction bytes.
11278 */
11279 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11280 {
11281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11282 IEM_MC_BEGIN(0, 1);
11283 IEM_MC_LOCAL(uint8_t, u8Value);
11284 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11285 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
11286 IEM_MC_ADVANCE_RIP();
11287 IEM_MC_END();
11288 }
11289 else
11290 {
11291 /*
11292 * We're writing a register to memory.
11293 */
11294 IEM_MC_BEGIN(0, 2);
11295 IEM_MC_LOCAL(uint8_t, u8Value);
11296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11297 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no further opcode bytes */
11298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11299 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11300 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
11301 IEM_MC_ADVANCE_RIP();
11302 IEM_MC_END();
11303 }
11304 return VINF_SUCCESS;
11305
11306}
11307
11308
11309/** Opcode 0x89 - mov Ev,Gv: store a general register into r/m16/32/64. */
11310FNIEMOP_DEF(iemOp_mov_Ev_Gv)
11311{
11312 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
11313
11314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11315
11316 /*
11317 * If rm is denoting a register, no more instruction bytes.
11318 */
11319 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11320 {
11321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11322 switch (pVCpu->iem.s.enmEffOpSize)
11323 {
11324 case IEMMODE_16BIT:
11325 IEM_MC_BEGIN(0, 1);
11326 IEM_MC_LOCAL(uint16_t, u16Value);
11327 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11328 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
11329 IEM_MC_ADVANCE_RIP();
11330 IEM_MC_END();
11331 break;
11332
11333 case IEMMODE_32BIT:
11334 IEM_MC_BEGIN(0, 1);
11335 IEM_MC_LOCAL(uint32_t, u32Value);
11336 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11337 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
11338 IEM_MC_ADVANCE_RIP();
11339 IEM_MC_END();
11340 break;
11341
11342 case IEMMODE_64BIT:
11343 IEM_MC_BEGIN(0, 1);
11344 IEM_MC_LOCAL(uint64_t, u64Value);
11345 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11346 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
11347 IEM_MC_ADVANCE_RIP();
11348 IEM_MC_END();
11349 break;
11350 }
11351 }
11352 else
11353 {
11354 /*
11355 * We're writing a register to memory.
11356 */
11357 switch (pVCpu->iem.s.enmEffOpSize)
11358 {
11359 case IEMMODE_16BIT:
11360 IEM_MC_BEGIN(0, 2);
11361 IEM_MC_LOCAL(uint16_t, u16Value);
11362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no further opcode bytes */
11364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11365 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11366 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
11367 IEM_MC_ADVANCE_RIP();
11368 IEM_MC_END();
11369 break;
11370
11371 case IEMMODE_32BIT:
11372 IEM_MC_BEGIN(0, 2);
11373 IEM_MC_LOCAL(uint32_t, u32Value);
11374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11377 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11378 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11379 IEM_MC_ADVANCE_RIP();
11380 IEM_MC_END();
11381 break;
11382
11383 case IEMMODE_64BIT:
11384 IEM_MC_BEGIN(0, 2);
11385 IEM_MC_LOCAL(uint64_t, u64Value);
11386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11387 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11389 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11390 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11391 IEM_MC_ADVANCE_RIP();
11392 IEM_MC_END();
11393 break;
11394 }
11395 }
11396 return VINF_SUCCESS;
11397}
11398
11399
11400/** Opcode 0x8a - mov Gb,Eb: load a byte register from r/m8. */
11401FNIEMOP_DEF(iemOp_mov_Gb_Eb)
11402{
11403 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
11404
11405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11406
11407 /*
11408 * If rm is denoting a register, no more instruction bytes.
11409 */
11410 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11411 {
11412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11413 IEM_MC_BEGIN(0, 1);
11414 IEM_MC_LOCAL(uint8_t, u8Value);
11415 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11416 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
11417 IEM_MC_ADVANCE_RIP();
11418 IEM_MC_END();
11419 }
11420 else
11421 {
11422 /*
11423 * We're loading a register from memory.
11424 */
11425 IEM_MC_BEGIN(0, 2);
11426 IEM_MC_LOCAL(uint8_t, u8Value);
11427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no further opcode bytes */
11429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11430 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11431 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
11432 IEM_MC_ADVANCE_RIP();
11433 IEM_MC_END();
11434 }
11435 return VINF_SUCCESS;
11436}
11437
11438
/**
 * Opcode 0x8b - mov Gv,Ev.
 *
 * Loads the word/dword/qword register selected by ModRM.reg (REX.R extended)
 * from the r/m operand, sized by the effective operand size.  No flags are
 * affected.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
11528
11529
/**
 * Opcode 0x63 - arpl Ew,Gw (legacy modes) / movsxd Gv,Ev (64-bit mode).
 *
 * Dispatches on CPU mode: outside 64-bit mode this opcode is ARPL; in
 * 64-bit mode it is MOVSXD, where a non-64-bit effective operand size is
 * handled as a plain mov (see the iemOp_mov_Gv_Ev call below).
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
11539
11540
/**
 * Opcode 0x8c - mov Ev,Sw.
 *
 * Stores the segment register selected by ModRM.reg into the r/m operand.
 * Raises \#UD if ModRM.reg selects a non-existent segment register (> GS).
 * Memory stores are always word sized; register stores respect the operand
 * size (zero-extending for 32/64-bit).
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11614
11615
11616
11617
/**
 * Opcode 0x8d - lea Gv,M.
 *
 * Stores the effective address of the memory operand into the ModRM 'reg'
 * register, truncated/zero-extended to the effective operand size.  The
 * register form of ModRM is invalid and raises \#UD.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
11664
11665
/**
 * Opcode 0x8e - mov Sw,Ev.
 *
 * Loads the segment register selected by ModRM.reg from a word register or
 * a word in memory, via the iemCImpl_load_SReg C implementation (which does
 * the descriptor loading/checks).  Raises \#UD for CS or a non-existent
 * segment register.  The access is word sized regardless of operand size.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11720
11721
/**
 * Opcode 0x8f /0 - pop Ev.
 *
 * Pops a word/dword/qword off the stack into the r/m operand.  The register
 * form shares iemOpCommonPopGReg.  The memory form is implemented directly
 * (not via IEM_MC micro-ops) because Intel documents RSP as already
 * incremented when it participates in the effective address calculation;
 * hence the address is computed with iemOpHlpCalcRmEffAddrEx using the
 * operand size as the "cbImm"-style RSP offset, and the pop itself goes
 * through a temporary RSP that is only committed on success.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    PCPUMCTX        pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    /* The last argument is the RSP displacement matching the pop size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop via a temporary RSP copy; only commit RSP if the store succeeds. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
11816
11817
/**
 * Opcode 0x8f - group 1A.
 *
 * /0 is pop Ev; /1 thru /7 are the AMD XOP prefix (similar to three byte
 * VEX), which is not decoded yet and currently raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp1A)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
    /** @todo XOP decoding. */
    IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
    return IEMOP_RAISE_INVALID_OPCODE();
}
11830
11831
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Swaps the general register @a iReg (REX.B extended here) with rAX at the
 * effective operand size.  Used by opcodes 0x90..0x97.
 *
 * @param   iReg    The low 3 bits of the register index (from the opcode).
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg,         u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg,         u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg,         u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11881
11882
11883/** Opcode 0x90. */
11884FNIEMOP_DEF(iemOp_nop)
11885{
11886 /* R8/R8D and RAX/EAX can be exchanged. */
11887 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
11888 {
11889 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
11890 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
11891 }
11892
11893 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
11894 IEMOP_MNEMONIC(pause, "pause");
11895 else
11896 IEMOP_MNEMONIC(nop, "nop");
11897 IEM_MC_BEGIN(0, 0);
11898 IEM_MC_ADVANCE_RIP();
11899 IEM_MC_END();
11900 return VINF_SUCCESS;
11901}
11902
11903
/** Opcode 0x91 - xchg rCX,rAX (shared helper does the work). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
11910
11911
/** Opcode 0x92 - xchg rDX,rAX (shared helper does the work). */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
11918
11919
/** Opcode 0x93 - xchg rBX,rAX (shared helper does the work). */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
11926
11927
11928/** Opcode 0x94. */
11929FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
11930{
11931 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
11932 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
11933}
11934
11935
/** Opcode 0x95 - xchg rBP,rAX (shared helper does the work). */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
11942
11943
/** Opcode 0x96 - xchg rSI,rAX (shared helper does the work). */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
11950
11951
/** Opcode 0x97 - xchg rDI,rAX (shared helper does the work). */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
11958
11959
/**
 * Opcode 0x98 - cbw / cwde / cdqe.
 *
 * Sign-extends AL into AX, AX into EAX, or EAX into RAX depending on the
 * effective operand size, implemented by testing the source's sign bit and
 * OR-ing/AND-ing the upper half accordingly.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12005
12006
/**
 * Opcode 0x99 - cwd / cdq / cqo.
 *
 * Sign-extends rAX into rDX: rDX is set to all ones or all zeroes depending
 * on the sign bit of AX/EAX/RAX at the effective operand size.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12052
12053
/**
 * Opcode 0x9a - call Ap (far call with immediate selector:offset).
 *
 * Invalid in 64-bit mode.  Decodes the 16- or 32-bit offset (by operand
 * size) followed by the 16-bit selector and defers to iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
12070
12071
/**
 * Opcode 0x9b - wait (aka fwait).
 *
 * Checks for pending x87 exceptions / device-not-available conditions and
 * otherwise does nothing.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12085
12086
/** Opcode 0x9c - pushf Fv.  Defers to the iemCImpl_pushf C implementation
 *  with the effective operand size (defaulting to 64-bit in long mode). */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
12094
12095
/** Opcode 0x9d - popf Fv.  Defers to the iemCImpl_popf C implementation
 *  with the effective operand size (defaulting to 64-bit in long mode). */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
12103
12104
/**
 * Opcode 0x9e - sahf.
 *
 * Stores AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF; the reserved
 * bit 1 is forced set).  In 64-bit mode this is only valid when the CPU
 * reports the LahfSahf feature, otherwise \#UD.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12127
12128
/**
 * Opcode 0x9f - lahf.
 *
 * Loads the low byte of EFLAGS into AH.  In 64-bit mode this is only valid
 * when the CPU reports the LahfSahf feature, otherwise \#UD.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12145
12146
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX part of the opcode (sized by the
 * effective address mode, zero-extended to 64 bits) and fend off lock
 * prefixes.  Will return on failures.
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
12171
12172/** Opcode 0xa0. */
12173FNIEMOP_DEF(iemOp_mov_Al_Ob)
12174{
12175 /*
12176 * Get the offset and fend of lock prefixes.
12177 */
12178 RTGCPTR GCPtrMemOff;
12179 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12180
12181 /*
12182 * Fetch AL.
12183 */
12184 IEM_MC_BEGIN(0,1);
12185 IEM_MC_LOCAL(uint8_t, u8Tmp);
12186 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12187 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
12188 IEM_MC_ADVANCE_RIP();
12189 IEM_MC_END();
12190 return VINF_SUCCESS;
12191}
12192
12193
/**
 * Opcode 0xa1 - mov rAX,Ov.
 *
 * Loads AX/EAX/RAX (by effective operand size) from the moffs address in
 * the effective segment.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12239
12240
12241/** Opcode 0xa2. */
12242FNIEMOP_DEF(iemOp_mov_Ob_AL)
12243{
12244 /*
12245 * Get the offset and fend of lock prefixes.
12246 */
12247 RTGCPTR GCPtrMemOff;
12248 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12249
12250 /*
12251 * Store AL.
12252 */
12253 IEM_MC_BEGIN(0,1);
12254 IEM_MC_LOCAL(uint8_t, u8Tmp);
12255 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
12256 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
12257 IEM_MC_ADVANCE_RIP();
12258 IEM_MC_END();
12259 return VINF_SUCCESS;
12260}
12261
12262
12263/** Opcode 0xa3. */
12264FNIEMOP_DEF(iemOp_mov_Ov_rAX)
12265{
12266 /*
12267 * Get the offset and fend of lock prefixes.
12268 */
12269 RTGCPTR GCPtrMemOff;
12270 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12271
12272 /*
12273 * Store rAX.
12274 */
12275 switch (pVCpu->iem.s.enmEffOpSize)
12276 {
12277 case IEMMODE_16BIT:
12278 IEM_MC_BEGIN(0,1);
12279 IEM_MC_LOCAL(uint16_t, u16Tmp);
12280 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
12281 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
12282 IEM_MC_ADVANCE_RIP();
12283 IEM_MC_END();
12284 return VINF_SUCCESS;
12285
12286 case IEMMODE_32BIT:
12287 IEM_MC_BEGIN(0,1);
12288 IEM_MC_LOCAL(uint32_t, u32Tmp);
12289 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
12290 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
12291 IEM_MC_ADVANCE_RIP();
12292 IEM_MC_END();
12293 return VINF_SUCCESS;
12294
12295 case IEMMODE_64BIT:
12296 IEM_MC_BEGIN(0,1);
12297 IEM_MC_LOCAL(uint64_t, u64Tmp);
12298 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
12299 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
12300 IEM_MC_ADVANCE_RIP();
12301 IEM_MC_END();
12302 return VINF_SUCCESS;
12303
12304 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12305 }
12306}
12307
/**
 * Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv to implement one
 * non-rep movs step: load from [seg:xSI], store to [ES:xDI], then advance
 * or retreat both index registers by the element size according to EFLAGS.DF.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
12326
/**
 * Opcode 0xa4 - movsb Xb,Yb.
 *
 * With a REP/REPNE prefix this defers to the per-address-mode C
 * implementations; otherwise a single byte move is done via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12360
12361
12362/** Opcode 0xa5. */
12363FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
12364{
12365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12366
12367 /*
12368 * Use the C implementation if a repeat prefix is encountered.
12369 */
12370 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12371 {
12372 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
12373 switch (pVCpu->iem.s.enmEffOpSize)
12374 {
12375 case IEMMODE_16BIT:
12376 switch (pVCpu->iem.s.enmEffAddrMode)
12377 {
12378 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
12379 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
12380 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
12381 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12382 }
12383 break;
12384 case IEMMODE_32BIT:
12385 switch (pVCpu->iem.s.enmEffAddrMode)
12386 {
12387 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
12388 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
12389 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
12390 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12391 }
12392 case IEMMODE_64BIT:
12393 switch (pVCpu->iem.s.enmEffAddrMode)
12394 {
12395 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
12396 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
12397 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
12398 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12399 }
12400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12401 }
12402 }
12403 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
12404
12405 /*
12406 * Annoying double switch here.
12407 * Using ugly macro for implementing the cases, sharing it with movsb.
12408 */
12409 switch (pVCpu->iem.s.enmEffOpSize)
12410 {
12411 case IEMMODE_16BIT:
12412 switch (pVCpu->iem.s.enmEffAddrMode)
12413 {
12414 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
12415 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
12416 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
12417 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12418 }
12419 break;
12420
12421 case IEMMODE_32BIT:
12422 switch (pVCpu->iem.s.enmEffAddrMode)
12423 {
12424 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
12425 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
12426 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
12427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12428 }
12429 break;
12430
12431 case IEMMODE_64BIT:
12432 switch (pVCpu->iem.s.enmEffAddrMode)
12433 {
12434 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12435 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
12436 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
12437 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12438 }
12439 break;
12440 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12441 }
12442 return VINF_SUCCESS;
12443}
12444
12445#undef IEM_MOVS_CASE
12446
/**
 * Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Implements one non-repeated CMPS variant for a given value size (8/16/32/64)
 * and address size (16/32/64): fetches the first operand from [effseg:xSI] and
 * the second from [ES:xDI], compares them via iemAImpl_cmp_uNN (which updates
 * EFLAGS), then advances or rewinds both xSI and xDI by the operand size
 * according to EFLAGS.DF.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \

/** Opcode 0xa6 - cmpsb Xb,Yb. */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * Note: REPZ is checked before REPNZ, so if both prefix bits are somehow
     * set the REPE variant wins.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
12519
12520
12521/** Opcode 0xa7. */
12522FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
12523{
12524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12525
12526 /*
12527 * Use the C implementation if a repeat prefix is encountered.
12528 */
12529 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12530 {
12531 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
12532 switch (pVCpu->iem.s.enmEffOpSize)
12533 {
12534 case IEMMODE_16BIT:
12535 switch (pVCpu->iem.s.enmEffAddrMode)
12536 {
12537 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12538 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12539 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12540 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12541 }
12542 break;
12543 case IEMMODE_32BIT:
12544 switch (pVCpu->iem.s.enmEffAddrMode)
12545 {
12546 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12547 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12548 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12549 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12550 }
12551 case IEMMODE_64BIT:
12552 switch (pVCpu->iem.s.enmEffAddrMode)
12553 {
12554 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
12555 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12556 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12557 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12558 }
12559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12560 }
12561 }
12562
12563 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12564 {
12565 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
12566 switch (pVCpu->iem.s.enmEffOpSize)
12567 {
12568 case IEMMODE_16BIT:
12569 switch (pVCpu->iem.s.enmEffAddrMode)
12570 {
12571 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12572 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12573 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12574 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12575 }
12576 break;
12577 case IEMMODE_32BIT:
12578 switch (pVCpu->iem.s.enmEffAddrMode)
12579 {
12580 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12581 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12582 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12583 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12584 }
12585 case IEMMODE_64BIT:
12586 switch (pVCpu->iem.s.enmEffAddrMode)
12587 {
12588 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
12589 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12590 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12591 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12592 }
12593 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12594 }
12595 }
12596
12597 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
12598
12599 /*
12600 * Annoying double switch here.
12601 * Using ugly macro for implementing the cases, sharing it with cmpsb.
12602 */
12603 switch (pVCpu->iem.s.enmEffOpSize)
12604 {
12605 case IEMMODE_16BIT:
12606 switch (pVCpu->iem.s.enmEffAddrMode)
12607 {
12608 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
12609 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
12610 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
12611 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12612 }
12613 break;
12614
12615 case IEMMODE_32BIT:
12616 switch (pVCpu->iem.s.enmEffAddrMode)
12617 {
12618 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
12619 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
12620 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
12621 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12622 }
12623 break;
12624
12625 case IEMMODE_64BIT:
12626 switch (pVCpu->iem.s.enmEffAddrMode)
12627 {
12628 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12629 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
12630 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
12631 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12632 }
12633 break;
12634 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12635 }
12636 return VINF_SUCCESS;
12637
12638}
12639
12640#undef IEM_CMPS_CASE
12641
/** Opcode 0xa8 - test AL,Ib.  AF is left undefined by TEST (see the
 *  verification hint below), the other arithmetic flags follow AND logic. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
12649
12650
/** Opcode 0xa9 - test rAX,Iz.  AF is left undefined by TEST. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
12658
12659
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Implements one non-repeated STOS variant for a given value size and address
 * size: stores AL/AX/EAX/RAX at [ES:xDI], then advances or rewinds xDI by the
 * operand size according to EFLAGS.DF.  EFLAGS are not otherwise affected.
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \

/** Opcode 0xaa - stosb Yb,AL. */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE behave the same for STOS, so both bits are tested.)
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12709
12710
12711/** Opcode 0xab. */
12712FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
12713{
12714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12715
12716 /*
12717 * Use the C implementation if a repeat prefix is encountered.
12718 */
12719 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12720 {
12721 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
12722 switch (pVCpu->iem.s.enmEffOpSize)
12723 {
12724 case IEMMODE_16BIT:
12725 switch (pVCpu->iem.s.enmEffAddrMode)
12726 {
12727 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
12728 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
12729 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
12730 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12731 }
12732 break;
12733 case IEMMODE_32BIT:
12734 switch (pVCpu->iem.s.enmEffAddrMode)
12735 {
12736 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
12737 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
12738 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
12739 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12740 }
12741 case IEMMODE_64BIT:
12742 switch (pVCpu->iem.s.enmEffAddrMode)
12743 {
12744 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
12745 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
12746 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
12747 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12748 }
12749 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12750 }
12751 }
12752 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
12753
12754 /*
12755 * Annoying double switch here.
12756 * Using ugly macro for implementing the cases, sharing it with stosb.
12757 */
12758 switch (pVCpu->iem.s.enmEffOpSize)
12759 {
12760 case IEMMODE_16BIT:
12761 switch (pVCpu->iem.s.enmEffAddrMode)
12762 {
12763 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
12764 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
12765 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
12766 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12767 }
12768 break;
12769
12770 case IEMMODE_32BIT:
12771 switch (pVCpu->iem.s.enmEffAddrMode)
12772 {
12773 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
12774 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
12775 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
12776 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12777 }
12778 break;
12779
12780 case IEMMODE_64BIT:
12781 switch (pVCpu->iem.s.enmEffAddrMode)
12782 {
12783 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12784 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
12785 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
12786 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12787 }
12788 break;
12789 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12790 }
12791 return VINF_SUCCESS;
12792}
12793
12794#undef IEM_STOS_CASE
12795
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Implements one non-repeated LODS variant for a given value size and address
 * size: loads AL/AX/EAX/RAX from [effseg:xSI], then advances or rewinds xSI by
 * the operand size according to EFLAGS.DF.  EFLAGS are not otherwise affected.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
12811
/** Opcode 0xac - lodsb AL,Xb. */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE behave the same for LODS, so both bits are tested.)
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12845
12846
12847/** Opcode 0xad. */
12848FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
12849{
12850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12851
12852 /*
12853 * Use the C implementation if a repeat prefix is encountered.
12854 */
12855 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12856 {
12857 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
12858 switch (pVCpu->iem.s.enmEffOpSize)
12859 {
12860 case IEMMODE_16BIT:
12861 switch (pVCpu->iem.s.enmEffAddrMode)
12862 {
12863 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
12864 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
12865 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
12866 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12867 }
12868 break;
12869 case IEMMODE_32BIT:
12870 switch (pVCpu->iem.s.enmEffAddrMode)
12871 {
12872 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
12873 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
12874 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
12875 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12876 }
12877 case IEMMODE_64BIT:
12878 switch (pVCpu->iem.s.enmEffAddrMode)
12879 {
12880 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
12881 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
12882 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
12883 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12884 }
12885 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12886 }
12887 }
12888 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
12889
12890 /*
12891 * Annoying double switch here.
12892 * Using ugly macro for implementing the cases, sharing it with lodsb.
12893 */
12894 switch (pVCpu->iem.s.enmEffOpSize)
12895 {
12896 case IEMMODE_16BIT:
12897 switch (pVCpu->iem.s.enmEffAddrMode)
12898 {
12899 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
12900 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
12901 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
12902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12903 }
12904 break;
12905
12906 case IEMMODE_32BIT:
12907 switch (pVCpu->iem.s.enmEffAddrMode)
12908 {
12909 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
12910 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
12911 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
12912 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12913 }
12914 break;
12915
12916 case IEMMODE_64BIT:
12917 switch (pVCpu->iem.s.enmEffAddrMode)
12918 {
12919 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12920 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
12921 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
12922 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12923 }
12924 break;
12925 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12926 }
12927 return VINF_SUCCESS;
12928}
12929
12930#undef IEM_LODS_CASE
12931
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Implements one non-repeated SCAS variant for a given value size and address
 * size: compares AL/AX/EAX/RAX against [ES:xDI] via iemAImpl_cmp_uNN (which
 * updates EFLAGS), then advances or rewinds xDI by the operand size according
 * to EFLAGS.DF.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
12953
12954/** Opcode 0xae. */
12955FNIEMOP_DEF(iemOp_scasb_AL_Xb)
12956{
12957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12958
12959 /*
12960 * Use the C implementation if a repeat prefix is encountered.
12961 */
12962 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12963 {
12964 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
12965 switch (pVCpu->iem.s.enmEffAddrMode)
12966 {
12967 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
12968 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
12969 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
12970 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12971 }
12972 }
12973 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12974 {
12975 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
12976 switch (pVCpu->iem.s.enmEffAddrMode)
12977 {
12978 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
12979 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
12980 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
12981 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12982 }
12983 }
12984 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
12985
12986 /*
12987 * Sharing case implementation with stos[wdq] below.
12988 */
12989 switch (pVCpu->iem.s.enmEffAddrMode)
12990 {
12991 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
12992 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
12993 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
12994 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12995 }
12996 return VINF_SUCCESS;
12997}
12998
12999
13000/** Opcode 0xaf. */
13001FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
13002{
13003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13004
13005 /*
13006 * Use the C implementation if a repeat prefix is encountered.
13007 */
13008 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
13009 {
13010 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
13011 switch (pVCpu->iem.s.enmEffOpSize)
13012 {
13013 case IEMMODE_16BIT:
13014 switch (pVCpu->iem.s.enmEffAddrMode)
13015 {
13016 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
13017 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
13018 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
13019 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13020 }
13021 break;
13022 case IEMMODE_32BIT:
13023 switch (pVCpu->iem.s.enmEffAddrMode)
13024 {
13025 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
13026 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
13027 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
13028 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13029 }
13030 case IEMMODE_64BIT:
13031 switch (pVCpu->iem.s.enmEffAddrMode)
13032 {
13033 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
13034 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
13035 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
13036 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13037 }
13038 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13039 }
13040 }
13041 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
13042 {
13043 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
13044 switch (pVCpu->iem.s.enmEffOpSize)
13045 {
13046 case IEMMODE_16BIT:
13047 switch (pVCpu->iem.s.enmEffAddrMode)
13048 {
13049 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
13050 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
13051 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
13052 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13053 }
13054 break;
13055 case IEMMODE_32BIT:
13056 switch (pVCpu->iem.s.enmEffAddrMode)
13057 {
13058 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
13059 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
13060 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
13061 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13062 }
13063 case IEMMODE_64BIT:
13064 switch (pVCpu->iem.s.enmEffAddrMode)
13065 {
13066 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
13067 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
13068 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
13069 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13070 }
13071 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13072 }
13073 }
13074 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
13075
13076 /*
13077 * Annoying double switch here.
13078 * Using ugly macro for implementing the cases, sharing it with scasb.
13079 */
13080 switch (pVCpu->iem.s.enmEffOpSize)
13081 {
13082 case IEMMODE_16BIT:
13083 switch (pVCpu->iem.s.enmEffAddrMode)
13084 {
13085 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
13086 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
13087 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
13088 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13089 }
13090 break;
13091
13092 case IEMMODE_32BIT:
13093 switch (pVCpu->iem.s.enmEffAddrMode)
13094 {
13095 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
13096 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
13097 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
13098 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13099 }
13100 break;
13101
13102 case IEMMODE_64BIT:
13103 switch (pVCpu->iem.s.enmEffAddrMode)
13104 {
13105 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
13106 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
13107 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
13108 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13109 }
13110 break;
13111 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13112 }
13113 return VINF_SUCCESS;
13114}
13115
13116#undef IEM_SCAS_CASE
13117
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the immediate byte and stores it into the given 8-bit general
 * register.  Decoding is only completed after the immediate has been read.
 *
 * @param   iReg    The 8-bit general register index (including any REX.B
 *                  extension supplied by the caller).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13134
13135
13136/** Opcode 0xb0. */
13137FNIEMOP_DEF(iemOp_mov_AL_Ib)
13138{
13139 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
13140 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
13141}
13142
13143
13144/** Opcode 0xb1. */
13145FNIEMOP_DEF(iemOp_CL_Ib)
13146{
13147 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
13148 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13149}
13150
13151
13152/** Opcode 0xb2. */
13153FNIEMOP_DEF(iemOp_DL_Ib)
13154{
13155 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
13156 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13157}
13158
13159
13160/** Opcode 0xb3. */
13161FNIEMOP_DEF(iemOp_BL_Ib)
13162{
13163 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
13164 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13165}
13166
13167
13168/** Opcode 0xb4. */
13169FNIEMOP_DEF(iemOp_mov_AH_Ib)
13170{
13171 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
13172 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13173}
13174
13175
13176/** Opcode 0xb5. */
13177FNIEMOP_DEF(iemOp_CH_Ib)
13178{
13179 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
13180 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13181}
13182
13183
13184/** Opcode 0xb6. */
13185FNIEMOP_DEF(iemOp_DH_Ib)
13186{
13187 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
13188 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13189}
13190
13191
13192/** Opcode 0xb7. */
13193FNIEMOP_DEF(iemOp_BH_Ib)
13194{
13195 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
13196 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13197}
13198
13199
13200/**
13201 * Common 'mov regX,immX' helper.
13202 */
13203FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
13204{
13205 switch (pVCpu->iem.s.enmEffOpSize)
13206 {
13207 case IEMMODE_16BIT:
13208 {
13209 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13211
13212 IEM_MC_BEGIN(0, 1);
13213 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
13214 IEM_MC_STORE_GREG_U16(iReg, u16Value);
13215 IEM_MC_ADVANCE_RIP();
13216 IEM_MC_END();
13217 break;
13218 }
13219
13220 case IEMMODE_32BIT:
13221 {
13222 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13224
13225 IEM_MC_BEGIN(0, 1);
13226 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
13227 IEM_MC_STORE_GREG_U32(iReg, u32Value);
13228 IEM_MC_ADVANCE_RIP();
13229 IEM_MC_END();
13230 break;
13231 }
13232 case IEMMODE_64BIT:
13233 {
13234 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
13235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13236
13237 IEM_MC_BEGIN(0, 1);
13238 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
13239 IEM_MC_STORE_GREG_U64(iReg, u64Value);
13240 IEM_MC_ADVANCE_RIP();
13241 IEM_MC_END();
13242 break;
13243 }
13244 }
13245
13246 return VINF_SUCCESS;
13247}
13248
13249
13250/** Opcode 0xb8. */
13251FNIEMOP_DEF(iemOp_eAX_Iv)
13252{
13253 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
13254 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
13255}
13256
13257
13258/** Opcode 0xb9. */
13259FNIEMOP_DEF(iemOp_eCX_Iv)
13260{
13261 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
13262 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13263}
13264
13265
13266/** Opcode 0xba. */
13267FNIEMOP_DEF(iemOp_eDX_Iv)
13268{
13269 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
13270 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13271}
13272
13273
13274/** Opcode 0xbb. */
13275FNIEMOP_DEF(iemOp_eBX_Iv)
13276{
13277 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
13278 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13279}
13280
13281
13282/** Opcode 0xbc. */
13283FNIEMOP_DEF(iemOp_eSP_Iv)
13284{
13285 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
13286 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13287}
13288
13289
13290/** Opcode 0xbd. */
13291FNIEMOP_DEF(iemOp_eBP_Iv)
13292{
13293 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
13294 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13295}
13296
13297
13298/** Opcode 0xbe. */
13299FNIEMOP_DEF(iemOp_eSI_Iv)
13300{
13301 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
13302 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13303}
13304
13305
13306/** Opcode 0xbf. */
13307FNIEMOP_DEF(iemOp_eDI_Iv)
13308{
13309 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
13310 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13311}
13312
13313
/** Opcode 0xc0 - Group 2 Eb,Ib: rol/ror/rcl/rcr/shl/shr/sar by an immediate
 *  shift count (186+).  The ModR/M reg field selects the operation; /6 is
 *  undefined and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by the shift/rotate instructions. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register operand: the shift count immediate follows ModR/M directly. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory operand: the effective address is decoded first (the trailing
           1 presumably tells the calculator one immediate byte follows — the
           cShift byte fetched right after; TODO confirm against the macro). */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13373
13374
/**
 * Opcode 0xc1 - Group 2: rotate/shift r/m16/32/64 by an imm8 count (186+).
 *
 * The ModR/M reg field selects the operation: /0 rol, /1 ror, /2 rcl,
 * /3 rcr, /4 shl, /5 shr, /7 sar; /6 is an invalid encoding.
 * OF and AF are treated as undefined after these operations.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory - imm8 follows the ModR/M operand, so it is fetched after
           the effective address calculation (which is told about the one
           trailing immediate byte). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13514
13515
/** Opcode 0xc2 - retn Iw.  Near return, additionally popping Iw bytes of
 *  stack arguments.  Defers to the retn C implementation. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near ret defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
13525
13526
/** Opcode 0xc3 - retn.  Near return; same worker as 0xc2 with a zero
 *  argument-pop count. */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near ret defaults to 64-bit operand size in long mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
13535
13536
/** Opcode 0xc4 - les Gv,Mp / 2-byte VEX prefix.  Which one it is depends on
 *  the CPU mode and the ModR/M mod field (see comment below). */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
        /* The LES instruction is invalid in 64-bit mode.  In legacy and
           compatibility mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
13557
13558
/** Opcode 0xc5 - lds Gv,Mp / 3-byte VEX prefix.  Which one it is depends on
 *  the CPU mode and the ModR/M mod field (see comment below). */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode.  VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
    /** @todo Test when exactly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
13596
13597
/** Opcode 0xc6 - Group 11: mov Eb,Ib.  Only the /0 encoding is valid; all
 *  other reg-field values raise \#UD. */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access - the imm8 follows the ModR/M operand, hence the
           trailing '1' to the effective address calculation and the fetch
           afterwards. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13630
13631
/** Opcode 0xc7 - Group 11: mov Ev,Iz.  Only the /0 encoding is valid; all
 *  other reg-field values raise \#UD.  In 64-bit operand size the imm32 is
 *  sign-extended to 64 bits. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access - the immediate follows the ModR/M operand; the
           effective address calculation is told how many immediate bytes
           trail it (2 or 4) and the immediate is fetched afterwards. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13717
13718
13719
13720
/** Opcode 0xc8 - enter Iw,Ib (186+).  Iw is the frame size, Ib the nesting
 *  level; the heavy lifting is deferred to the C implementation. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
13732
13733
/** Opcode 0xc9 - leave (186+).  Tears down the stack frame set up by
 *  enter; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
13743
13744
/** Opcode 0xca - retf Iw.  Far return, additionally popping Iw bytes of
 *  stack arguments; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
13754
13755
/** Opcode 0xcb - retf.  Far return; same worker as 0xca with a zero
 *  argument-pop count. */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
13764
13765
/** Opcode 0xcc - int3.  Raises \#BP via the common software interrupt
 *  worker, flagged as the dedicated breakpoint instruction. */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
13772
13773
/** Opcode 0xcd - int Ib.  Software interrupt with the vector taken from
 *  the immediate byte. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
13781
13782
/** Opcode 0xce - into.  Raises \#OF through the common software interrupt
 *  worker.  Invalid in 64-bit mode.
 *  @note The conditional check on EFLAGS.OF is done by iemCImpl_int /
 *        the interrupt delivery path, not here (assumption from the call
 *        pattern - confirm against iemCImpl_int). */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13796
13797
/** Opcode 0xcf - iret.  Interrupt return; deferred to the C
 *  implementation with the effective operand size. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
13805
13806
/**
 * Opcode 0xd0 - Group 2: rotate/shift r/m8 by 1.
 *
 * The ModR/M reg field selects the operation: /0 rol, /1 ror, /2 rcl,
 * /3 rcr, /4 shl, /5 shr, /7 sar; /6 is an invalid encoding.
 * OF and AF are treated as undefined after these operations.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13862
13863
13864
/**
 * Opcode 0xd1 - Group 2: rotate/shift r/m16/32/64 by 1.
 *
 * The ModR/M reg field selects the operation: /0 rol, /1 ror, /2 rcl,
 * /3 rcr, /4 shl, /5 shr, /7 sar; /6 is an invalid encoding.
 * OF and AF are treated as undefined after these operations.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13996
13997
/**
 * Opcode 0xd2 - Group 2: rotate/shift r/m8 by CL.
 *
 * The ModR/M reg field selects the operation: /0 rol, /1 ror, /2 rcl,
 * /3 rcr, /4 shl, /5 shr, /7 sar; /6 is an invalid encoding.
 * OF and AF are treated as undefined after these operations.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register - shift count comes from CL at execution time. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
14055
14056
/**
 * Opcode 0xd3 - Group 2: rotate/shift r/m16/32/64 by CL.
 *
 * The ModR/M reg field selects the operation: /0 rol, /1 ror, /2 rcl,
 * /3 rcr, /4 shl, /5 shr, /7 sar; /6 is an invalid encoding.
 * OF and AF are treated as undefined after these operations.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register - shift count comes from CL at execution time. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14194
/** Opcode 0xd4 - aam Ib.  ASCII adjust AX after multiply; the immediate is
 *  the divisor (0x0a in the canonical encoding).  A zero immediate raises
 *  \#DE.  Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
14206
14207
/** Opcode 0xd5 - aad Ib.  ASCII adjust AX before divide; the immediate is
 *  the multiplier (0x0a in the canonical encoding).  Invalid in 64-bit
 *  mode. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
14217
14218
14219/** Opcode 0xd6. */
14220FNIEMOP_DEF(iemOp_salc)
14221{
14222 IEMOP_MNEMONIC(salc, "salc");
14223 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
14224 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14226 IEMOP_HLP_NO_64BIT();
14227
14228 IEM_MC_BEGIN(0, 0);
14229 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
14230 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
14231 } IEM_MC_ELSE() {
14232 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
14233 } IEM_MC_ENDIF();
14234 IEM_MC_ADVANCE_RIP();
14235 IEM_MC_END();
14236 return VINF_SUCCESS;
14237}
14238
14239
/** Opcode 0xd7 - XLAT/XLATB: AL = [rBX + zero-extended AL], one body per
 *  effective address size so the rBX+AL sum wraps at the right width. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);  /* zero-extend AL */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX); /* + BX, 16-bit wrap */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);           /* result into AL */
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);  /* zero-extend AL */
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX); /* + EBX, 32-bit wrap */
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);  /* zero-extend AL */
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX); /* + RBX */
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14286
14287
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * @param   bRm         The ModR/M byte; the low 3 bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,      FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,               1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,               2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and STn must hold values; otherwise flag underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);    /* result replaces ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14318
14319
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (FSW); no register is written.
 *
 * @param   bRm         The ModR/M byte; the low 3 bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,             1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,             2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);                  /* only the status word changes */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);      /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14350
14351
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the stack when done.
 *
 * @param   bRm         The ModR/M byte; the low 3 bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,             1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,             2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);               /* update FSW, then pop ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);   /* underflow still pops */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14382
14383
/** Opcode 0xd8 11/0 - FADD ST0,STn: ST0 += STn. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
14390
14391
/** Opcode 0xd8 11/1 - FMUL ST0,STn: ST0 *= STn. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
14398
14399
/** Opcode 0xd8 11/2 - FCOM ST0,STn: compare, FSW flags only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
14406
14407
/** Opcode 0xd8 11/3 - FCOMP ST0,STn: compare, FSW flags only, then pop.
 *  Same comparison worker as FCOM, only the pop-variant wrapper differs. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
14414
14415
/** Opcode 0xd8 11/4 - FSUB ST0,STn: ST0 -= STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
14422
14423
/** Opcode 0xd8 11/5 - FSUBR ST0,STn: ST0 = STn - ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
14430
14431
/** Opcode 0xd8 11/6 - FDIV ST0,STn: ST0 /= STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
14438
14439
/** Opcode 0xd8 11/7 - FDIVR ST0,STn: ST0 = STn / ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
14446
14447
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte, used to compute the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 32-bit real operand before touching FPU state. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);     /* result replaces ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14483
14484
/** Opcode 0xd8 !11/0 - FADD ST0,m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
14491
14492
/** Opcode 0xd8 !11/1 - FMUL ST0,m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
14499
14500
/** Opcode 0xd8 !11/2 - FCOM ST0,m32real: compare ST0 with a 32-bit real in
 *  memory; only FSW is updated, nothing is stored. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* The *_MEM_OP variants also record FPUDP/FPUDS for the data operand. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14533
14534
/** Opcode 0xd8 !11/3 - FCOMP ST0,m32real: like FCOM m32r but pops ST0
 *  afterwards (both on success and on stack underflow). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14567
14568
/** Opcode 0xd8 !11/4 - FSUB ST0,m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
14575
14576
/** Opcode 0xd8 !11/5 - FSUBR ST0,m32real (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
14583
14584
/** Opcode 0xd8 !11/6 - FDIV ST0,m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
14591
14592
/** Opcode 0xd8 !11/7 - FDIVR ST0,m32real (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
14599
14600
/** Opcode 0xd8 - FPU escape group 0.  Dispatches on ModR/M: mod==3 selects
 *  the register (STn) forms, otherwise the m32real memory forms. */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) word: low 3 bits of the escape byte + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14638
14639
/** Opcode 0xd9 /0 mem32real - FLD m32real: convert the 32-bit real to 80-bit
 *  and push it onto the FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)    /* ST7 free means there is room to push */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14672
14673
/** Opcode 0xd9 !11/2 mem32real - FST m32real: store ST0 to memory as a
 *  32-bit real (rounding per FCW), without popping. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before reading the FPU so memory faults hit first. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, a negative QNaN is stored instead. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14708
14709
/** Opcode 0xd9 !11/3 - FSTP m32real: like FST m32real but pops ST0 when
 *  done (also on the masked-underflow path). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store a negative QNaN, then pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14744
14745
/** Opcode 0xd9 !11/4 - FLDENV m14/28byte: load the FPU environment
 *  (control/status/tag words etc.); deferred to a C implementation since
 *  the layout depends on the effective operand size. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
14763
14764
14765/** Opcode 0xd9 !11/5 */
14766FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
14767{
14768 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
14769 IEM_MC_BEGIN(1, 1);
14770 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14771 IEM_MC_ARG(uint16_t, u16Fsw, 0);
14772 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14774 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14775 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14776 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14777 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
14778 IEM_MC_END();
14779 return VINF_SUCCESS;
14780}
14781
14782
/** Opcode 0xd9 !11/6 - FNSTENV m14/28byte: store the FPU environment to
 *  memory; layout depends on effective operand size, so it is deferred to a
 *  C implementation.
 *  NOTE(review): IEMOP_MNEMONIC says "fstenv" while this handler is the
 *  no-wait FNSTENV form - confirm whether the stats name is intentional. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
14800
14801
/** Opcode 0xd9 !11/7 - FNSTCW m2byte: store the current FPU control word
 *  to memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
14819
14820
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++? - FNOP: FPU no-operation, but it
 *  still checks for pending FPU exceptions and updates FOP/FPUIP. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
14838
14839
/** Opcode 0xd9 11/0 stN - FLD STn: push a copy of STn onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);  /* wrap STn as a "result"... */
        IEM_MC_PUSH_FPU_RESULT(FpuRes);                       /* ...and push it */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();   /* source register empty */
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
14867
14868
/** Opcode 0xd9 11/3 stN - FXCH STn: exchange ST0 and STn. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);            /* STn value (C1 set) -> result */
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1); /* old ST0 -> STn */
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);                               /* old STn -> ST0 */
    IEM_MC_ELSE()
        /* One or both registers empty: underflow handling done in C code. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
14899
14900
/** Opcode 0xd9 11/4, 0xdd 11/2 - FSTP STn: copy ST0 to STn and pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence,
       so the destination-is-ST0 case just pops without copying. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);     /* no copy needed; just pop */
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);   /* ST0 -> result */
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);     /* store to STn, pop */
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
14947
14948
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);     /* result replaces ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14978
14979
/** Opcode 0xd9 0xe0 - FCHS: negate the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
14986
14987
/** Opcode 0xd9 0xe1 - FABS: clear the sign of ST0 (absolute value). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
14994
14995
/**
 * Common worker for FPU instructions working on ST0 and only returning FSW;
 * no register is written.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);              /* flags only */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);  /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15024
15025
/** Opcode 0xd9 0xe4 - FTST: compare ST0 against 0.0, setting FSW flags only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
15032
15033
/** Opcode 0xd9 0xe5 - FXAM: classify the value in ST0 via FSW flags. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
15040
15041
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly);
 *                      it produces the constant to push.
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)    /* ST7 free means there is room to push */
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15069
15070
/** Opcode 0xd9 0xe8 - FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
15077
15078
/** Opcode 0xd9 0xe9 - FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
15085
15086
/** Opcode 0xd9 0xea - FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
15093
/** Opcode 0xd9 0xeb - FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
15100
15101
/** Opcode 0xd9 0xec - FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
15108
/** Opcode 0xd9 0xed - FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
15115
15116
/** Opcode 0xd9 0xee - FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
15123
15124
/** Opcode 0xd9 0xf0 - F2XM1: ST0 = 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
15131
15132
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * @param   bRm         Selects STn via the low 3 bits (callers may also pass
 *                      a literal register number, e.g. 1 for the fyl2x family).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Note the operand order: STn is value1 (destination), ST0 is value2. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15164
15165
/** Opcode 0xd9 0xf1 - FYL2X: ST1 = ST1 * log2(ST0), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
15172
15173
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO,           FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO,  pFpuResTwo, FpuResTwo,  0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value,              1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);       /* replace ST0 + push second value */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15203
15204
/** Opcode 0xd9 0xf2. Uses the replace-ST0-and-push worker (fptan pushes a second result). */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
15211
15212
/** Opcode 0xd9 0xf3. Result goes to ST1 and the stack is popped. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
15219
15220
/** Opcode 0xd9 0xf4. Uses the replace-ST0-and-push worker (fxtract produces two results). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
15227
15228
/** Opcode 0xd9 0xf5. Dispatches via the ST0-op-STn worker (defined earlier in the file); no pop. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
15235
15236
/** Opcode 0xd9 0xf6. Decrements the FPU stack TOP pointer without touching register contents. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    /* Clears C0/C2/C3 (see note above) by storing a constant FSW of 0. */
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15259
15260
/** Opcode 0xd9 0xf7. Increments the FPU stack TOP pointer without touching register contents. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    /* Clears C0/C2/C3 (see note above) by storing a constant FSW of 0. */
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15283
15284
/** Opcode 0xd9 0xf8. Dispatches via the ST0-op-STn worker (defined earlier in the file); no pop. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
15291
15292
/** Opcode 0xd9 0xf9. Result goes to ST1 and the stack is popped. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
15299
15300
/** Opcode 0xd9 0xfa. Unary operation on ST0 via the common ST0 worker. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
15307
15308
/** Opcode 0xd9 0xfb. Uses the replace-ST0-and-push worker (fsincos produces two results). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
15315
15316
/** Opcode 0xd9 0xfc. Unary operation on ST0 via the common ST0 worker. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
15323
15324
/** Opcode 0xd9 0xfd. Dispatches via the ST0-op-STn worker (defined earlier in the file); no pop. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
15331
15332
/** Opcode 0xd9 0xfe. Unary operation on ST0 via the common ST0 worker. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
15339
15340
/** Opcode 0xd9 0xff. Unary operation on ST0 via the common ST0 worker. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
15347
15348
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 with mod=3 and second byte 0xe0..0xff; indexed by
 * (byte - 0xe0).  Entries marked iemOp_Invalid raise \#UD. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
15385
15386
/** Opcode 0xd9.
 * FPU escape byte 1: splits on mod and reg bits of the ModR/M byte; register
 * forms (mod=3) use the g_apfnEscF1_E0toFF table for reg>=4, memory forms
 * dispatch to the m32r/env/cw handlers. */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit x87 opcode (low 3 opcode bits + ModR/M) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg>=4 implies bRm is 0xe0..0xff, i.e. a valid table index. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15429
15430
/** Opcode 0xda 11/0.
 * FCMOVB: copies STn to ST0 when CF is set; updates FOP/FPUIP either way. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both STn (source, referenced) and ST0 (destination) must be non-empty. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15457
15458
/** Opcode 0xda 11/1.
 * FCMOVE: copies STn to ST0 when ZF is set; updates FOP/FPUIP either way. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15485
15486
/** Opcode 0xda 11/2.
 * FCMOVBE: copies STn to ST0 when CF or ZF is set; updates FOP/FPUIP either way. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15513
15514
/** Opcode 0xda 11/3.
 * FCMOVU: copies STn to ST0 when PF is set (unordered); updates FOP/FPUIP either way. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15541
15542
15543/**
15544 * Common worker for FPU instructions working on ST0 and STn, only affecting
15545 * flags, and popping twice when done.
15546 *
15547 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15548 */
15549FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
15550{
15551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15552
15553 IEM_MC_BEGIN(3, 1);
15554 IEM_MC_LOCAL(uint16_t, u16Fsw);
15555 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15556 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15557 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15558
15559 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15560 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15561
15562 IEM_MC_PREPARE_FPU_USAGE();
15563 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
15564 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
15565 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
15566 IEM_MC_ELSE()
15567 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
15568 IEM_MC_ENDIF();
15569 IEM_MC_ADVANCE_RIP();
15570
15571 IEM_MC_END();
15572 return VINF_SUCCESS;
15573}
15574
15575
/** Opcode 0xda 0xe9. Compares ST0 with ST1 (flags only) and pops twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
15582
15583
15584/**
15585 * Common worker for FPU instructions working on ST0 and an m32i, and storing
15586 * the result in ST0.
15587 *
15588 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15589 */
15590FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
15591{
15592 IEM_MC_BEGIN(3, 3);
15593 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15594 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15595 IEM_MC_LOCAL(int32_t, i32Val2);
15596 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15597 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15598 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15599
15600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15602
15603 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15604 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15605 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15606
15607 IEM_MC_PREPARE_FPU_USAGE();
15608 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15609 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
15610 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15611 IEM_MC_ELSE()
15612 IEM_MC_FPU_STACK_UNDERFLOW(0);
15613 IEM_MC_ENDIF();
15614 IEM_MC_ADVANCE_RIP();
15615
15616 IEM_MC_END();
15617 return VINF_SUCCESS;
15618}
15619
15620
/** Opcode 0xda !11/0. ST0 op m32i, result in ST0, via the common m32i worker. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
15627
15628
/** Opcode 0xda !11/1. ST0 op m32i, result in ST0, via the common m32i worker. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
15635
15636
/** Opcode 0xda !11/2.
 * Compares ST0 with an m32i (flags only, no pop); records the memory operand
 * in the FPU data pointer via the *_MEM_OP FSW update. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: no destination register to mark, only flags/underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15669
15670
/** Opcode 0xda !11/3.
 * Same as ficom m32i but pops the stack afterwards (the *_THEN_POP variants). */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15703
15704
/** Opcode 0xda !11/4. ST0 op m32i, result in ST0, via the common m32i worker. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
15711
15712
/** Opcode 0xda !11/5. ST0 op m32i (reversed operands), result in ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
15719
15720
/** Opcode 0xda !11/6. ST0 op m32i, result in ST0, via the common m32i worker. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
15727
15728
/** Opcode 0xda !11/7. ST0 op m32i (reversed operands), result in ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
15735
15736
/** Opcode 0xda.
 * FPU escape byte 2: register forms (mod=3) are FCMOVcc plus FUCOMPP (0xe9);
 * memory forms are the m32i integer arithmetic/compare instructions. */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit x87 opcode (low 3 opcode bits + ModR/M) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15776
15777
/** Opcode 0xdb !11/0.
 * Loads an m32i, converts it to R80 and pushes it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is the slot that becomes the new ST0 after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15809
15810
/** Opcode 0xdb !11/1.
 * Stores ST0 to an m32i using truncation (fistt) and pops.  The destination
 * is mapped for write first; on underflow with IM masked, the integer
 * indefinite value (INT32_MIN) is written instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW (skipped on unmasked exception). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15845
15846
/** Opcode 0xdb !11/2.
 * Stores ST0 to an m32i (rounding per FCW) without popping.  On underflow with
 * IM masked, the integer indefinite value (INT32_MIN) is written instead. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15881
15882
/** Opcode 0xdb !11/3.
 * Same as fist m32i but pops the stack afterwards (the *_THEN_POP variants). */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15917
15918
/** Opcode 0xdb !11/5.
 * Loads an 80-bit real from memory and pushes it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is the slot that becomes the new ST0 after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15950
15951
/** Opcode 0xdb !11/7.
 * Stores ST0 to an 80-bit real in memory and pops.  On underflow with IM
 * masked, a negative QNaN is written to the destination instead. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        /* Commit is conditional on the FSW (skipped on unmasked exception). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15986
15987
/** Opcode 0xdb 11/0.
 * FCMOVNB: copies STn to ST0 when CF is clear; updates FOP/FPUIP either way. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16014
16015
/** Opcode 0xdb 11/1.
 * FCMOVNE: copies STn to ST0 when ZF is clear; updates FOP/FPUIP either way. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16042
16043
/** Opcode 0xdb 11/2.
 * FCMOVNBE: copies STn to ST0 when both CF and ZF are clear; updates
 * FOP/FPUIP either way. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16070
16071
/** Opcode 0xdb 11/3.
 * FCMOVNU: copies STn to ST0 when PF is clear (not unordered); updates
 * FOP/FPUIP either way.
 * NOTE(review): the identifier/mnemonic spells "fcmovnnu" with a double 'n';
 * the instruction is FCMOVNU — looks like a typo, kept since the dispatcher
 * references this name. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16098
16099
/** Opcode 0xdb 0xe0.
 * 8087-only instruction; emulated as a no-op (only the DNA check applies). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16111
16112
/** Opcode 0xdb 0xe1.
 * 8087-only instruction; emulated as a no-op (only the DNA check applies). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16124
16125
/** Opcode 0xdb 0xe2.
 * Clears the FSW exception bits without checking for pending exceptions
 * (no IEM_MC_MAYBE_RAISE_FPU_XCPT — that is the 'n' in fnclex). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16140
16141
/** Opcode 0xdb 0xe3.
 * Defers to the C implementation of finit; fCheckXcpts=false makes it the
 * no-wait (fninit) variant. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
16149
16150
/** Opcode 0xdb 0xe4.
 * 80287-only instruction; emulated as a no-op (only the DNA check applies). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16162
16163
/** Opcode 0xdb 0xe5.
 * 80287XL-only instruction; raises \#UD here since the no-op emulation is
 * compiled out (newer CPUs \#UD on this encoding — see the \#if 0 block). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
16179
16180
/** Opcode 0xdb 11/5.
 * FUCOMI ST(0),ST(i) - unordered compare setting EFLAGS, no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
16187
16188
/** Opcode 0xdb 11/6.
 * FCOMI ST(0),ST(i) - ordered compare setting EFLAGS, no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
16195
16196
/** Opcode 0xdb.
 * Escape opcode 3: dispatches on the ModRM byte. Register forms (mod==3)
 * cover FCMOVcc, the 0xe0..0xe7 special encodings, FUCOMI and FCOMI; memory
 * forms cover 32-bit integer loads/stores and 80-bit real load/store. */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): low 3 bits of the escape byte + ModRM. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* Group 4 is fully decoded by the complete ModRM byte. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN,  bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16246
16247
16248/**
16249 * Common worker for FPU instructions working on STn and ST0, and storing the
16250 * result in STn unless IE, DE or ZE was raised.
16251 *
16252 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16253 */
16254FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
16255{
16256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16257
16258 IEM_MC_BEGIN(3, 1);
16259 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16260 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16261 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16262 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
16263
16264 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16265 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16266
16267 IEM_MC_PREPARE_FPU_USAGE();
16268 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
16269 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
16270 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
16271 IEM_MC_ELSE()
16272 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
16273 IEM_MC_ENDIF();
16274 IEM_MC_ADVANCE_RIP();
16275
16276 IEM_MC_END();
16277 return VINF_SUCCESS;
16278}
16279
16280
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0): ST(i) := ST(i) + ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
16287
16288
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0): ST(i) := ST(i) * ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
16295
16296
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0): reversed subtraction, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
16303
16304
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0): subtraction, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
16311
16312
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0): reversed division, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
16319
16320
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0): division, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
16327
16328
16329/**
16330 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
16331 * memory operand, and storing the result in ST0.
16332 *
16333 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16334 */
16335FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
16336{
16337 IEM_MC_BEGIN(3, 3);
16338 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16339 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16340 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
16341 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16342 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
16343 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
16344
16345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16347 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16348 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16349
16350 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16351 IEM_MC_PREPARE_FPU_USAGE();
16352 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
16353 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
16354 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16355 IEM_MC_ELSE()
16356 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16357 IEM_MC_ENDIF();
16358 IEM_MC_ADVANCE_RIP();
16359
16360 IEM_MC_END();
16361 return VINF_SUCCESS;
16362}
16363
16364
/** Opcode 0xdc !11/0.
 * FADD m64real: ST(0) := ST(0) + m64. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
16371
16372
/** Opcode 0xdc !11/1.
 * FMUL m64real: ST(0) := ST(0) * m64. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
16379
16380
/** Opcode 0xdc !11/2.
 * FCOM ST(0),m64real: compare ST(0) with the memory operand; only FSW is
 * updated, no register is written, nothing is popped. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: underflow is not attributed to any particular stack reg. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16413
16414
/** Opcode 0xdc !11/3.
 * FCOMP ST(0),m64real: same compare as FCOM m64r, but pops ST(0) afterwards
 * (note the *_THEN_POP variants in both branches). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16447
16448
/** Opcode 0xdc !11/4.
 * FSUB m64real: ST(0) := ST(0) - m64. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
16455
16456
/** Opcode 0xdc !11/5.
 * FSUBR m64real: reversed subtraction, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
16463
16464
/** Opcode 0xdc !11/6.
 * FDIV m64real: ST(0) := ST(0) / m64. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
16471
16472
/** Opcode 0xdc !11/7.
 * FDIVR m64real: reversed division, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
16479
16480
/** Opcode 0xdc.
 * Escape opcode 4: register forms operate on ST(i) with ST(0); memory forms
 * operate on ST(0) with a 64-bit real operand. */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): low 3 bits of the escape byte + ModRM. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16517
16518
/** Opcode 0xdd !11/0.
 * FLD m64real: convert the 64-bit real to 80-bit and push it onto the stack.
 * Raises stack overflow handling if the would-be ST(0) slot (reg 7 from the
 * current top) is not empty.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16550
16551
/** Opcode 0xdd !11/1.
 * FISTTP m64int: store ST(0) to memory as a 64-bit integer using truncation,
 * then pop. On masked invalid operation (FCW.IM set) the integer indefinite
 * value is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so store faults hit before any
       FPU state is changed. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16586
16587
/** Opcode 0xdd !11/2.
 * FST m64real: store ST(0) to memory as a 64-bit real, no pop. On stack
 * underflow with FCW.IM set, a negative QNaN is stored instead. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16622
16623
16624
16625
/** Opcode 0xdd !11/3.
 * FSTP m64real: same as FST m64r but pops ST(0) afterwards (note the
 * *_THEN_POP variants in both branches). */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16660
16661
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte: restore the full FPU state from memory; deferred to the
 * C implementation (layout depends on the effective operand size). */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16679
16680
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte: save the full FPU state to memory without checking for
 * pending exceptions; deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
16699
/** Opcode 0xdd !11/7.
 * FNSTSW m16: store the FPU status word to a 16-bit memory operand without
 * checking for pending exceptions. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
16724
16725
/** Opcode 0xdd 11/0.
 * FFREE ST(i): tag the given stack register as empty; FOP/FIP are updated. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16747
16748
/** Opcode 0xdd 11/2.
 * FST ST(i): copy ST(0) into ST(i) (FSW untouched on success), recording a
 * stack underflow against ST(i) if ST(0) is empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16773
16774
/** Opcode 0xdd 11/4.
 * FUCOM ST(0),ST(i): unordered compare, FSW condition codes only, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
16781
16782
/** Opcode 0xdd 11/5.
 * FUCOMP ST(0),ST(i): unordered compare, then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
16789
16790
/** Opcode 0xdd.
 * Escape opcode 5: register forms are FFREE/FST/FSTP/FUCOM(P); memory forms
 * are 64-bit real load/store, FISTTP m64i, FRSTOR, FNSAVE and FNSTSW m16. */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): low 3 bits of the escape byte + ModRM. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16827
16828
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0): ST(i) := ST(i) + ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
16835
16836
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0): ST(i) := ST(i) * ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
16843
16844
/** Opcode 0xde 0xd9.
 * FCOMPP: compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
16851
16852
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0): reversed subtraction into ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
16859
16860
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0): subtraction into ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
16867
16868
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0): reversed division into ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
16875
16876
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0): division into ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
16883
16884
16885/**
16886 * Common worker for FPU instructions working on ST0 and an m16i, and storing
16887 * the result in ST0.
16888 *
16889 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16890 */
16891FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
16892{
16893 IEM_MC_BEGIN(3, 3);
16894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16895 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16896 IEM_MC_LOCAL(int16_t, i16Val2);
16897 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16898 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16899 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16900
16901 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16903
16904 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16905 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16906 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16907
16908 IEM_MC_PREPARE_FPU_USAGE();
16909 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16910 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
16911 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
16912 IEM_MC_ELSE()
16913 IEM_MC_FPU_STACK_UNDERFLOW(0);
16914 IEM_MC_ENDIF();
16915 IEM_MC_ADVANCE_RIP();
16916
16917 IEM_MC_END();
16918 return VINF_SUCCESS;
16919}
16920
16921
/** Opcode 0xde !11/0.
 * FIADD m16int: ST(0) := ST(0) + m16i. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
16928
16929
/** Opcode 0xde !11/1.
 * FIMUL m16int: ST(0) := ST(0) * m16i. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
16936
16937
/** Opcode 0xde !11/2.
 * FICOM ST(0),m16int: compare ST(0) with a 16-bit integer memory operand;
 * updates FSW only, no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16970
16971
/** Opcode 0xde !11/3.
 * FICOMP ST(0),m16int: same compare as FICOM m16i, but pops ST(0) afterwards
 * (note the *_THEN_POP variants in both branches). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17004
17005
/** Opcode 0xde !11/4.
 * FISUB m16int: ST(0) := ST(0) - m16i. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
17012
17013
/** Opcode 0xde !11/5.
 * FISUBR m16int: reversed subtraction, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
17020
17021
/** Opcode 0xde !11/6.
 * FIDIV m16int: ST(0) := ST(0) / m16i. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
17028
17029
/** Opcode 0xde !11/7.
 * FIDIVR m16int: reversed division, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
17036
17037
/** Opcode 0xde.
 * Escape opcode 6: register forms are the popping arithmetic ops plus FCOMPP
 * (only valid for ModRM byte 0xd9); memory forms use 16-bit integer operands. */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): low 3 bits of the escape byte + ModRM. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17076
17077
/** Opcode 0xdf 11/0.
 * FFREEP ST(i) - undocumented instruction, assumed to work like
 * FFREE ST(i) followed by FINCSTP (free the register, then pop). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK); /* ffree: mark ST(i) empty */
    IEM_MC_FPU_STACK_INC_TOP();                     /* fincstp: pop the stack */
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17099
17100
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - store the FPU status word in AX. No-wait form: no pending
 * FPU exception check is done (only #NM for CR0.TS/EM). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17117
17118
/** Opcode 0xdf 11/5.
 * FUCOMIP ST0,ST(i) - unordered compare setting ZF/PF/CF, then pop.
 * NOTE(review): this reuses iemAImpl_fcomi_r80_by_r80; architecturally
 * FUCOMI(P) raises #IA only for SNaN operands whereas FCOMI(P) does so
 * for any NaN - confirm whether a dedicated fucomi worker is intended. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
17125
17126
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i) - ordered compare setting ZF/PF/CF, then pop
 * (deferred to the shared fcomi/fucomi C implementation). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
17133
17134
/** Opcode 0xdf !11/0.
 * FILD m16int - load a signed 16-bit integer from memory, convert to
 * 80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST(7) (the register below TOP) to be empty, else it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17166
17167
/** Opcode 0xdf !11/1.
 * FISTTP m16int - store ST(0) to memory as a signed 16-bit integer using
 * truncation (round-toward-zero regardless of FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: if the invalid-operation exception is masked, store
           the integer indefinite value; either way signal stack underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17202
17203
/** Opcode 0xdf !11/2.
 * FIST m16int - store ST(0) to memory as a signed 16-bit integer, rounded
 * according to FCW.RC; the stack is NOT popped. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store integer indefinite if #IA is masked; flag underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17238
17239
/** Opcode 0xdf !11/3.
 * FISTP m16int - store ST(0) to memory as a signed 16-bit integer, rounded
 * according to FCW.RC, then pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store integer indefinite if #IA is masked; flag underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17274
17275
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load packed BCD. Not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
17278
17279
/** Opcode 0xdf !11/5.
 * FILD m64int - load a signed 64-bit integer from memory, convert to
 * 80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST(7) (the register below TOP) to be empty, else it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17311
17312
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store packed BCD and pop. Not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
17315
17316
/** Opcode 0xdf !11/7.
 * FISTP m64int - store ST(0) to memory as a signed 64-bit integer, rounded
 * according to FCW.RC, then pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store integer indefinite if #IA is masked; flag underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17351
17352
/** Opcode 0xdf.
 * FPU escape 0xdf dispatcher: register forms (mod == 3) are mostly reserved
 * encodings plus FNSTSW AX and the F(U)COMIP pair; memory forms are 16/64-bit
 * integer and BCD load/store.
 * NOTE(review): unlike iemOp_EscF6 this does not record uFpuOpcode - confirm
 * whether that is intentional. */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* only DF E0 is FNSTSW AX; the rest of /4 is invalid */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17390
17391
/** Opcode 0xe0.
 * LOOPNE rel8: decrement the counter register (CX/ECX/RCX depending on the
 * effective address size) and jump if it is non-zero AND ZF is clear. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects which counter register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17438
17439
/** Opcode 0xe1.
 * LOOPE rel8: decrement the counter register (CX/ECX/RCX depending on the
 * effective address size) and jump if it is non-zero AND ZF is set. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects which counter register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17486
17487
/** Opcode 0xe2.
 * LOOP rel8: decrement the counter register (CX/ECX/RCX depending on the
 * effective address size) and jump if it is non-zero. A branch back to the
 * instruction itself ("loop $") is special-cased: the counter is simply
 * cleared and execution falls through, instead of iterating. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* i8Imm == -instruction-length means the target is this instruction. */
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* "loop $": skip the busy-looping, clear the counter and move on. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17561
17562
/** Opcode 0xe3.
 * JCXZ/JECXZ/JRCXZ rel8: jump if the counter register (selected by the
 * effective address size) is zero. Note the inverted branch structure:
 * non-zero falls through, zero takes the jump. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17606
17607
/** Opcode 0xe4.
 * IN AL,imm8 - read one byte from the immediate port into AL;
 * deferred to the common 'in' C implementation (imm port, 1 byte). */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}
17616
17617
/** Opcode 0xe5.
 * IN eAX,imm8 - read 2 or 4 bytes (by effective operand size) from the
 * immediate port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17626
17627
/** Opcode 0xe6.
 * OUT imm8,AL - write AL (1 byte) to the immediate port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
17636
17637
/** Opcode 0xe7.
 * OUT imm8,eAX - write AX/EAX (2 or 4 bytes by effective operand size)
 * to the immediate port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17646
17647
/** Opcode 0xe8.
 * CALL rel16/rel32 - near relative call. Operand size defaults to 64-bit in
 * long mode; the 64-bit form takes a sign-extended 32-bit displacement. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* imm32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17676
17677
/** Opcode 0xe9.
 * JMP rel16/rel32 - near relative jump. In 64-bit mode the displacement is
 * still 32-bit (sign-extended), so that case shares the 32-bit path. */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT: /* same 32-bit displacement encoding as below */
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17707
17708
/** Opcode 0xea.
 * JMP ptr16:16/ptr16:32 - direct far jump; invalid in 64-bit mode.
 * Decodes offset (16 or 32 bits by operand size) then the selector, and
 * defers to the far-jump C implementation. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
17725
17726
/** Opcode 0xeb.
 * JMP rel8 - short relative jump, unconditional. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
17740
17741
/** Opcode 0xec.
 * IN AL,DX - read one byte from the port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
17749
17750
/** Opcode 0xed.
 * IN eAX,DX - read 2 or 4 bytes (by effective operand size) from the port
 * in DX into AX/EAX.
 * NOTE(review): function name lacks the "in_" prefix used by its siblings
 * (iemOp_in_AL_DX); it cannot be renamed here since the one-byte opcode
 * table references it. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17758
17759
/** Opcode 0xee.
 * OUT DX,AL - write AL (1 byte) to the port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
17767
17768
/** Opcode 0xef.
 * OUT DX,eAX - write AX/EAX (2 or 4 bytes by effective operand size) to the
 * port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17776
17777
/** Opcode 0xf0.
 * LOCK prefix - record the prefix flag, then decode and dispatch the next
 * opcode byte through the one-byte opcode table. */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
17787
17788
/** Opcode 0xf1.
 * INT1/ICEBP - raises #DB; treated as a software interrupt to vector 1 here
 * (fIsBpInstr=false, i.e. not an INT3-style breakpoint). */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
17797
17798
/** Opcode 0xf2.
 * REPNE/REPNZ prefix - records the prefix (clearing any earlier REPZ),
 * then decodes and dispatches the next opcode byte. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
17814
17815
/** Opcode 0xf3.
 * REPE/REPZ prefix - records the prefix (clearing any earlier REPNZ),
 * then decodes and dispatches the next opcode byte. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
17831
17832
/** Opcode 0xf4.
 * HLT - halt the CPU; deferred entirely to the C implementation. */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
17839
17840
/** Opcode 0xf5.
 * CMC - complement (flip) the carry flag; no other flags affected. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17852
17853
17854/**
17855 * Common implementation of 'inc/dec/not/neg Eb'.
17856 *
17857 * @param bRm The RM byte.
17858 * @param pImpl The instruction implementation.
17859 */
17860FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17861{
17862 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17863 {
17864 /* register access */
17865 IEM_MC_BEGIN(2, 0);
17866 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17867 IEM_MC_ARG(uint32_t *, pEFlags, 1);
17868 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17869 IEM_MC_REF_EFLAGS(pEFlags);
17870 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17871 IEM_MC_ADVANCE_RIP();
17872 IEM_MC_END();
17873 }
17874 else
17875 {
17876 /* memory access. */
17877 IEM_MC_BEGIN(2, 2);
17878 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17879 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17880 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17881
17882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17883 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17884 IEM_MC_FETCH_EFLAGS(EFlags);
17885 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17886 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17887 else
17888 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
17889
17890 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
17891 IEM_MC_COMMIT_EFLAGS(EFlags);
17892 IEM_MC_ADVANCE_RIP();
17893 IEM_MC_END();
17894 }
17895 return VINF_SUCCESS;
17896}
17897
17898
17899/**
17900 * Common implementation of 'inc/dec/not/neg Ev'.
17901 *
17902 * @param bRm The RM byte.
17903 * @param pImpl The instruction implementation.
17904 */
17905FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17906{
17907 /* Registers are handled by a common worker. */
17908 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17909 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17910
17911 /* Memory we do here. */
17912 switch (pVCpu->iem.s.enmEffOpSize)
17913 {
17914 case IEMMODE_16BIT:
17915 IEM_MC_BEGIN(2, 2);
17916 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17917 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17918 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17919
17920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17921 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17922 IEM_MC_FETCH_EFLAGS(EFlags);
17923 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17924 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
17925 else
17926 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
17927
17928 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
17929 IEM_MC_COMMIT_EFLAGS(EFlags);
17930 IEM_MC_ADVANCE_RIP();
17931 IEM_MC_END();
17932 return VINF_SUCCESS;
17933
17934 case IEMMODE_32BIT:
17935 IEM_MC_BEGIN(2, 2);
17936 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17937 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17938 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17939
17940 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17941 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17942 IEM_MC_FETCH_EFLAGS(EFlags);
17943 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17944 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
17945 else
17946 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
17947
17948 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
17949 IEM_MC_COMMIT_EFLAGS(EFlags);
17950 IEM_MC_ADVANCE_RIP();
17951 IEM_MC_END();
17952 return VINF_SUCCESS;
17953
17954 case IEMMODE_64BIT:
17955 IEM_MC_BEGIN(2, 2);
17956 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17957 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17958 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17959
17960 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17961 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17962 IEM_MC_FETCH_EFLAGS(EFlags);
17963 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17964 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
17965 else
17966 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
17967
17968 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
17969 IEM_MC_COMMIT_EFLAGS(EFlags);
17970 IEM_MC_ADVANCE_RIP();
17971 IEM_MC_END();
17972 return VINF_SUCCESS;
17973
17974 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17975 }
17976}
17977
17978
/** Opcode 0xf6 /0.
 * TEST Eb,Ib - AND the byte operand with an immediate, updating flags only
 * (no result is written back, hence the read-only memory mapping). */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 immediate byte follows the addressing bytes */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
18025
18026
/** Opcode 0xf7 /0. */
/**
 * 'test Ev,Iv' - AND the r/m operand with an immediate, set flags, discard
 * the result.  The immediate is fetched AFTER the effective address (it
 * follows the ModR/M bytes), which is why the memory forms pass the
 * immediate size as the cbImm argument to IEM_MC_CALC_RM_EFF_ADDR.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST. */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* 64-bit TEST takes a sign-extended 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 2 = size of the trailing immediate for RIP-relative addressing. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* Mapped read-only: TEST never writes the operand back. */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Still a 4-byte (sign-extended) immediate in 64-bit mode. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
18166
18167
/** Opcode 0xf6 /4, /5, /6 and /7. */
/**
 * Common worker for the byte forms of MUL, IMUL, DIV and IDIV.
 *
 * The 8-bit source operand comes from a register or memory; the implicit
 * destination is AX (passed by reference).  The assembly worker returns a
 * non-zero status to signal a \#DE condition (divide by zero / overflow).
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The 8-bit assembly worker (mul/imul/div/idiv).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero rc from the worker means divide error -> raise #DE. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
18219
18220
18221/** Opcode 0xf7 /4, /5, /6 and /7. */
18222FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
18223{
18224 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18225
18226 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18227 {
18228 /* register access */
18229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18230 switch (pVCpu->iem.s.enmEffOpSize)
18231 {
18232 case IEMMODE_16BIT:
18233 {
18234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18235 IEM_MC_BEGIN(4, 1);
18236 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18237 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18238 IEM_MC_ARG(uint16_t, u16Value, 2);
18239 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18240 IEM_MC_LOCAL(int32_t, rc);
18241
18242 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18243 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18244 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18245 IEM_MC_REF_EFLAGS(pEFlags);
18246 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18247 IEM_MC_IF_LOCAL_IS_Z(rc) {
18248 IEM_MC_ADVANCE_RIP();
18249 } IEM_MC_ELSE() {
18250 IEM_MC_RAISE_DIVIDE_ERROR();
18251 } IEM_MC_ENDIF();
18252
18253 IEM_MC_END();
18254 return VINF_SUCCESS;
18255 }
18256
18257 case IEMMODE_32BIT:
18258 {
18259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18260 IEM_MC_BEGIN(4, 1);
18261 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18262 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18263 IEM_MC_ARG(uint32_t, u32Value, 2);
18264 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18265 IEM_MC_LOCAL(int32_t, rc);
18266
18267 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18268 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18269 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18270 IEM_MC_REF_EFLAGS(pEFlags);
18271 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18272 IEM_MC_IF_LOCAL_IS_Z(rc) {
18273 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18274 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18275 IEM_MC_ADVANCE_RIP();
18276 } IEM_MC_ELSE() {
18277 IEM_MC_RAISE_DIVIDE_ERROR();
18278 } IEM_MC_ENDIF();
18279
18280 IEM_MC_END();
18281 return VINF_SUCCESS;
18282 }
18283
18284 case IEMMODE_64BIT:
18285 {
18286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18287 IEM_MC_BEGIN(4, 1);
18288 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18289 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18290 IEM_MC_ARG(uint64_t, u64Value, 2);
18291 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18292 IEM_MC_LOCAL(int32_t, rc);
18293
18294 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18295 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18296 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18297 IEM_MC_REF_EFLAGS(pEFlags);
18298 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18299 IEM_MC_IF_LOCAL_IS_Z(rc) {
18300 IEM_MC_ADVANCE_RIP();
18301 } IEM_MC_ELSE() {
18302 IEM_MC_RAISE_DIVIDE_ERROR();
18303 } IEM_MC_ENDIF();
18304
18305 IEM_MC_END();
18306 return VINF_SUCCESS;
18307 }
18308
18309 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18310 }
18311 }
18312 else
18313 {
18314 /* memory access. */
18315 switch (pVCpu->iem.s.enmEffOpSize)
18316 {
18317 case IEMMODE_16BIT:
18318 {
18319 IEM_MC_BEGIN(4, 2);
18320 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18321 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18322 IEM_MC_ARG(uint16_t, u16Value, 2);
18323 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18325 IEM_MC_LOCAL(int32_t, rc);
18326
18327 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18329 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18330 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18331 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18332 IEM_MC_REF_EFLAGS(pEFlags);
18333 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18334 IEM_MC_IF_LOCAL_IS_Z(rc) {
18335 IEM_MC_ADVANCE_RIP();
18336 } IEM_MC_ELSE() {
18337 IEM_MC_RAISE_DIVIDE_ERROR();
18338 } IEM_MC_ENDIF();
18339
18340 IEM_MC_END();
18341 return VINF_SUCCESS;
18342 }
18343
18344 case IEMMODE_32BIT:
18345 {
18346 IEM_MC_BEGIN(4, 2);
18347 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18348 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18349 IEM_MC_ARG(uint32_t, u32Value, 2);
18350 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18351 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18352 IEM_MC_LOCAL(int32_t, rc);
18353
18354 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18356 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18357 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18358 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18359 IEM_MC_REF_EFLAGS(pEFlags);
18360 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18361 IEM_MC_IF_LOCAL_IS_Z(rc) {
18362 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18363 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18364 IEM_MC_ADVANCE_RIP();
18365 } IEM_MC_ELSE() {
18366 IEM_MC_RAISE_DIVIDE_ERROR();
18367 } IEM_MC_ENDIF();
18368
18369 IEM_MC_END();
18370 return VINF_SUCCESS;
18371 }
18372
18373 case IEMMODE_64BIT:
18374 {
18375 IEM_MC_BEGIN(4, 2);
18376 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18377 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18378 IEM_MC_ARG(uint64_t, u64Value, 2);
18379 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18381 IEM_MC_LOCAL(int32_t, rc);
18382
18383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18385 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18386 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18387 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18388 IEM_MC_REF_EFLAGS(pEFlags);
18389 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18390 IEM_MC_IF_LOCAL_IS_Z(rc) {
18391 IEM_MC_ADVANCE_RIP();
18392 } IEM_MC_ELSE() {
18393 IEM_MC_RAISE_DIVIDE_ERROR();
18394 } IEM_MC_ENDIF();
18395
18396 IEM_MC_END();
18397 return VINF_SUCCESS;
18398 }
18399
18400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18401 }
18402 }
18403}
18404
/** Opcode 0xf6. */
/**
 * Group 3 byte-operand dispatcher: test/not/neg/mul/imul/div/idiv Eb,
 * selected by the ModR/M reg field.  /1 is undefined (historically an
 * alias of /0 test on some CPUs, see the todo below).
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
18441
18442
/** Opcode 0xf7. */
/**
 * Group 3 word/dword/qword-operand dispatcher: test/not/neg/mul/imul/div/
 * idiv Ev, selected by the ModR/M reg field.  /1 is undefined.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
18479
18480
/** Opcode 0xf8. */
/** 'clc' - clear the carry flag. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18492
18493
/** Opcode 0xf9. */
/** 'stc' - set the carry flag. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18505
18506
/** Opcode 0xfa. */
/** 'cli' - deferred to a CIMPL worker (IOPL/VME checks needed). */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
18514
18515
/** Opcode 0xfb. */
/** 'sti' - deferred to a CIMPL worker (IOPL/VME checks and interrupt shadow). */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
18522
18523
/** Opcode 0xfc. */
/** 'cld' - clear the direction flag. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18535
18536
/** Opcode 0xfd. */
/** 'std' - set the direction flag. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18548
18549
/** Opcode 0xfe. */
/** Group 4 dispatcher: inc Eb (/0), dec Eb (/1); /2../7 raise \#UD. */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
18567
18568
/**
 * Opcode 0xff /2 - near indirect call.
 *
 * Fetches the new IP/EIP/RIP from a register or memory operand and defers
 * the branch (stack push + RIP update) to the operand-size specific CIMPL
 * worker.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
18653
/** CIMPL far-branch worker type (callf/jmpf): takes the new CS selector,
 *  the segment offset and the effective operand size. */
typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
18655
18656FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
18657{
18658 /* Registers? How?? */
18659 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
18660 { /* likely */ }
18661 else
18662 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
18663
18664 /* Far pointer loaded from memory. */
18665 switch (pVCpu->iem.s.enmEffOpSize)
18666 {
18667 case IEMMODE_16BIT:
18668 IEM_MC_BEGIN(3, 1);
18669 IEM_MC_ARG(uint16_t, u16Sel, 0);
18670 IEM_MC_ARG(uint16_t, offSeg, 1);
18671 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
18672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18673 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18675 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18676 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
18677 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18678 IEM_MC_END();
18679 return VINF_SUCCESS;
18680
18681 case IEMMODE_64BIT:
18682 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
18683 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
18684 * and call far qword [rsp] encodings. */
18685 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
18686 {
18687 IEM_MC_BEGIN(3, 1);
18688 IEM_MC_ARG(uint16_t, u16Sel, 0);
18689 IEM_MC_ARG(uint64_t, offSeg, 1);
18690 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
18691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18694 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18695 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
18696 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18697 IEM_MC_END();
18698 return VINF_SUCCESS;
18699 }
18700 /* AMD falls thru. */
18701 /* fall thru */
18702
18703 case IEMMODE_32BIT:
18704 IEM_MC_BEGIN(3, 1);
18705 IEM_MC_ARG(uint16_t, u16Sel, 0);
18706 IEM_MC_ARG(uint32_t, offSeg, 1);
18707 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
18708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18711 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18712 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
18713 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18714 IEM_MC_END();
18715 return VINF_SUCCESS;
18716
18717 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18718 }
18719}
18720
18721
/**
 * Opcode 0xff /3 - far indirect call via memory far pointer.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    /* Shares the far-pointer loading with jmpf; only the CIMPL worker differs. */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
18731
18732
/**
 * Opcode 0xff /4 - near indirect jump.
 *
 * Fetches the new IP/EIP/RIP from a register or memory operand and sets RIP
 * directly (no stack activity, unlike calln).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
18817
18818
/**
 * Opcode 0xff /5 - far indirect jump via memory far pointer.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    /* Shares the far-pointer loading with callf; only the CIMPL worker differs. */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
18828
18829
/**
 * Opcode 0xff /6 - push Ev.
 *
 * Register operands go through the common push-GReg worker; memory operands
 * are fetched and pushed here.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* pushes default to 64-bit operand size in long mode */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
18885
18886
/** Opcode 0xff. */
/**
 * Group 5 dispatcher: inc/dec/calln/callf/jmpn/jmpf/push Ev by the ModR/M
 * reg field; /7 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}
18915
18916
18917
/**
 * The one-byte opcode decoder dispatch table, indexed by the opcode byte.
 * Prefix bytes (segment overrides, operand/address size, lock, rep) have
 * their own handler entries; 0x0f chains to the two-byte escape table.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
18985
18986
18987/** @} */
18988
18989#ifdef _MSC_VER
18990# pragma warning(pop)
18991#endif
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette