VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 65750

Last change on this file since 65750 was 65750, checked in by vboxsync, 8 years ago

IEM: 0x0f 0x6e split up.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 665.0 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 65750 2017-02-13 08:22:13Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
11 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24#ifdef _MSC_VER
25# pragma warning(push)
26# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
27#endif
28
29
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination (Eb,Gb form).
 *
 * @returns Strict VBox status code (VINF_SUCCESS on the direct path).
 * @param   pImpl       Pointer to the instruction implementation (assembly);
 *                      pfnNormalU8 does the plain operation, pfnLockedU8 the
 *                      LOCK-prefixed variant (NULL for read-only ops, see below).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination. */

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Source = MODRM.reg (with REX.R), destination = MODRM.rm (with REX.B). */
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* Instructions without a locked variant (CMP, TEST) only read the destination. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        /* Read-only ops can never take a LOCK prefix; reject it while decoding. */
        if (!pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Dispatch to the locked worker when a LOCK prefix is present. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        /* Commit memory first, then the flags (see note above). */
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
92
93
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination (Ev,Gv form).
 *
 * @returns Strict VBox status code (VINF_SUCCESS on the direct path).
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK requires a memory destination. */

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit GPR writes zero the high half in 64-bit mode; TEST doesn't
                   write its destination, so it must not clear the high dword. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* NOTE(review): fAccess keys off pfnLockedU8 for every operand size while
           the decode checks below use the size-specific pfnLockedU16/U32/U64 —
           presumably CMP/TEST have all locked pointers NULL together; confirm. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
246
247
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination (Gb,Eb form).  The destination register is never locked, so
 * no LOCK prefix handling is needed here.
 *
 * @returns Strict VBox status code (VINF_SUCCESS on the direct path).
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Source = MODRM.rm (with REX.B), destination = MODRM.reg (with REX.R). */
        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  The source is only read, so a plain fetch
         * suffices (no mapping/commit dance like the Eb,Gb form needs).
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
300
301
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination (Gv,Ev form).  Destination is a register, so no
 * LOCK prefix handling is needed.
 *
 * @returns Strict VBox status code (VINF_SUCCESS on the direct path).
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes zero the upper dword in 64-bit mode.
                   NOTE(review): unlike the Ev,Gv worker there is no TEST exception
                   here — presumably TEST never reaches this Gv,Ev path; confirm. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.  Source operand only, so a plain fetch is fine.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
432
433
/**
 * Common worker for instructions like ADD, AND, OR, ++ working on AL with
 * a byte immediate (AL,Ib form).
 *
 * @returns Strict VBox status code (VINF_SUCCESS on the direct path).
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    /* Destination is always AL. */
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
458
459
/**
 * Common worker for instructions like ADD, AND, OR, ++ working on
 * AX/EAX/RAX with a word/dword immediate (rAX,Iz form).  The 64-bit form
 * uses a sign-extended dword immediate, per the x86 Iz convention.
 *
 * @returns Strict VBox status code (VINF_SUCCESS on the direct path).
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST doesn't write EAX, so it must not clear the high dword of RAX. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz is a dword immediate sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
532
533
/** Opcodes 0xf1, 0xd6 - plain invalid opcode, raises \#UD unconditionally. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC(Invalid, "Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
540
541
/** Invalid with RM byte - the ModR/M byte has already been fetched by the
 *  caller (FNIEMOPRM), we just ignore it and raise \#UD. */
FNIEMOPRM_DEF(iemOp_InvalidWithRM)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
    return IEMOP_RAISE_INVALID_OPCODE();
}
549
550
/** Invalid opcode where intel requires Mod R/M sequence.
 *  On Intel CPUs the ModR/M byte (and any memory operand bytes) are consumed
 *  before \#UD is raised, so the decoder state must advance past them. */
FNIEMOP_DEF(iemOp_InvalidNeedRM)
{
    IEMOP_MNEMONIC(InvalidNeedRM, "InvalidNeedRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        /* Decode (and thereby skip) the effective-address bytes for memory forms. */
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
571
572
573/** Invalid opcode where intel requires Mod R/M sequence and 8-byte
574 * immediate. */
575FNIEMOP_DEF(iemOp_InvalidNeedRMImm8)
576{
577 IEMOP_MNEMONIC(InvalidNeedRMImm8, "InvalidNeedRMImm8");
578 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
579 {
580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
581#ifndef TST_IEM_CHECK_MC
582 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
583 {
584 RTGCPTR GCPtrEff;
585 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
586 if (rcStrict != VINF_SUCCESS)
587 return rcStrict;
588 }
589#endif
590 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
591 IEMOP_HLP_DONE_DECODING();
592 }
593 return IEMOP_RAISE_INVALID_OPCODE();
594}
595
596
/** Invalid opcode where intel requires a 3rd escape byte and a Mod R/M
 *  sequence - both are consumed before \#UD is raised on Intel. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRM)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRM, "InvalidNeed3ByteEscRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm;  IEM_OPCODE_GET_NEXT_U8(&bRm);  RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        /* Decode (and thereby skip) the effective-address bytes for memory forms. */
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
619
620
/** Invalid opcode where intel requires a 3rd escape byte, Mod R/M sequence,
 *  and a byte immediate (a single imm8 is fetched below). */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRMImm8, "InvalidNeed3ByteEscRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm;  IEM_OPCODE_GET_NEXT_U8(&bRm);  RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            /* cbImm=1 because an imm8 follows; affects 64-bit RIP-relative addressing. */
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 1, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
644
645
646
647/** @name ..... opcodes.
648 *
649 * @{
650 */
651
652/** @} */
653
654
655/** @name Two byte opcodes (first byte 0x0f).
656 *
657 * @{
658 */
659
/** Opcode 0x0f 0x00 /0 - SLDT: store the LDT selector to a register (Rv,
 *  operand-size wide, zero-extended) or to memory (always 16 bits). */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        /* Register form: width follows the effective operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: always stores a 16-bit selector regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
716
717
/** Opcode 0x0f 0x00 /1 - STR: store the task register selector to a register
 *  (operand-size wide, zero-extended) or to memory (always 16 bits). */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        /* Register form: width follows the effective operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: always stores a 16-bit selector regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
774
775
/** Opcode 0x0f 0x00 /2 - LLDT: load the LDT register from a 16-bit selector
 *  in a register or memory; privilege checking is done in iemCImpl_lldt. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        /* CPL check precedes the memory read here. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
806
807
/** Opcode 0x0f 0x00 /3 - LTR: load the task register from a 16-bit selector
 *  in a register or memory; privilege checking is done in iemCImpl_ltr. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* CPL check precedes the memory read here. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
838
839
840/** Opcode 0x0f 0x00 /3. */
841FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
842{
843 IEMOP_HLP_MIN_286();
844 IEMOP_HLP_NO_REAL_OR_V86_MODE();
845
846 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
847 {
848 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
849 IEM_MC_BEGIN(2, 0);
850 IEM_MC_ARG(uint16_t, u16Sel, 0);
851 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
852 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
853 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
854 IEM_MC_END();
855 }
856 else
857 {
858 IEM_MC_BEGIN(2, 1);
859 IEM_MC_ARG(uint16_t, u16Sel, 0);
860 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
863 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
864 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
865 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
866 IEM_MC_END();
867 }
868 return VINF_SUCCESS;
869}
870
871
872/** Opcode 0x0f 0x00 /4. */
/** Opcode 0x0f 0x00 /4 - VERR: defer to the common VERR/VERW worker with
 *  fWrite=false. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
879
880
/** Opcode 0x0f 0x00 /5 - VERW: defer to the common VERR/VERW worker with
 *  fWrite=true. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}
888
889
/**
 * Group 6 jump table (opcode 0x0f 0x00), indexed by the ModR/M reg field.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,        /* /0 */
    iemOp_Grp6_str,         /* /1 */
    iemOp_Grp6_lldt,        /* /2 */
    iemOp_Grp6_ltr,         /* /3 */
    iemOp_Grp6_verr,        /* /4 */
    iemOp_Grp6_verw,        /* /5 */
    iemOp_InvalidWithRM,    /* /6 */
    iemOp_InvalidWithRM     /* /7 */
};
904
/** Opcode 0x0f 0x00 - fetch the ModR/M byte and dispatch on its reg field
 *  via the group 6 jump table. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
911
912
/** Opcode 0x0f 0x01 /0 (memory form) - SGDT: store the GDTR to memory; the
 *  heavy lifting is done in iemCImpl_sgdt. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg,     0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
929
930
/** Opcode 0x0f 0x01 /0, mod=3 - VMCALL.  Unimplemented stub: logs a
 *  complaint and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
937
938
/** Opcode 0x0f 0x01 /0, mod=3 - VMLAUNCH.  Unimplemented stub: logs a
 *  complaint and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
945
946
/** Opcode 0x0f 0x01 /0, mod=3 - VMRESUME.  Unimplemented stub: logs a
 *  complaint and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
953
954
/** Opcode 0x0f 0x01 /0, mod=3 - VMXOFF.  Unimplemented stub: logs a
 *  complaint and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
961
962
/** Opcode 0x0f 0x01 /1 (memory form) - SIDT: store the IDTR to memory; the
 *  heavy lifting is done in iemCImpl_sidt. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg,     0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
979
980
/** Opcode 0x0f 0x01 /1, mod=3 - MONITOR: defer to iemCImpl_monitor with the
 *  effective segment (the address operand is implicit in rAX). */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}
988
989
/** Opcode 0x0f 0x01 /1, mod=3 - MWAIT: defer to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
997
998
/** Opcode 0x0f 0x01 /2 (memory form) - LGDT: load the GDTR from memory; the
 *  heavy lifting (and privilege checks) are in iemCImpl_lgdt. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg,     0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    /* Operand size decides 24-bit vs 32-bit base handling in the C impl. */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1015
1016
/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    /* Only valid when the guest CPU profile advertises XSAVE/XRSTOR; \#UD otherwise. */
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1028
1029
/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    /* Only valid when the guest CPU profile advertises XSAVE/XRSTOR; \#UD otherwise. */
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1041
1042
/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    /*
     * Loads the IDTR from the memory operand via iemCImpl_lidt.  In 64-bit
     * mode the operand size is forced to 64-bit regardless of prefixes.
     */
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1061
1062
/*
 * AMD SVM instructions (0x0f 0x01 0xd8..0xdf) - all stubbed to raise \#UD
 * for the time being.
 */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
1086
1087/** Opcode 0x0f 0x01 /4. */
1088FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1089{
1090 IEMOP_MNEMONIC(smsw, "smsw");
1091 IEMOP_HLP_MIN_286();
1092 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1093 {
1094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1095 switch (pVCpu->iem.s.enmEffOpSize)
1096 {
1097 case IEMMODE_16BIT:
1098 IEM_MC_BEGIN(0, 1);
1099 IEM_MC_LOCAL(uint16_t, u16Tmp);
1100 IEM_MC_FETCH_CR0_U16(u16Tmp);
1101 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1102 { /* likely */ }
1103 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
1104 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1105 else
1106 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1107 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
1108 IEM_MC_ADVANCE_RIP();
1109 IEM_MC_END();
1110 return VINF_SUCCESS;
1111
1112 case IEMMODE_32BIT:
1113 IEM_MC_BEGIN(0, 1);
1114 IEM_MC_LOCAL(uint32_t, u32Tmp);
1115 IEM_MC_FETCH_CR0_U32(u32Tmp);
1116 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
1117 IEM_MC_ADVANCE_RIP();
1118 IEM_MC_END();
1119 return VINF_SUCCESS;
1120
1121 case IEMMODE_64BIT:
1122 IEM_MC_BEGIN(0, 1);
1123 IEM_MC_LOCAL(uint64_t, u64Tmp);
1124 IEM_MC_FETCH_CR0_U64(u64Tmp);
1125 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
1126 IEM_MC_ADVANCE_RIP();
1127 IEM_MC_END();
1128 return VINF_SUCCESS;
1129
1130 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1131 }
1132 }
1133 else
1134 {
1135 /* Ignore operand size here, memory refs are always 16-bit. */
1136 IEM_MC_BEGIN(0, 2);
1137 IEM_MC_LOCAL(uint16_t, u16Tmp);
1138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1139 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1141 IEM_MC_FETCH_CR0_U16(u16Tmp);
1142 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1143 { /* likely */ }
1144 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
1145 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1146 else
1147 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1148 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
1149 IEM_MC_ADVANCE_RIP();
1150 IEM_MC_END();
1151 return VINF_SUCCESS;
1152 }
1153}
1154
1155
/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: fetch the GPR and hand it to iemCImpl_lmsw. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: calc the address first (may fetch more opcode bytes). */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1185
1186
/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    /*
     * Invalidates the TLB entry for the effective address via iemCImpl_invlpg.
     * NOTE(review): unlike the other memory forms in this group, the decode-done
     * check here precedes IEM_MC_CALC_RM_EFF_ADDR - confirm this ordering is
     * intentional.
     */
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1200
1201
/** Opcode 0x0f 0x01 0xf8 (mod=3, reg=7, rm=0). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    /* 64-bit mode only; the work is done by iemCImpl_swapgs. */
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1210
1211
/** Opcode 0x0f 0x01 0xf9 (mod=3, reg=7, rm=1). */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    /* Not implemented yet - reports the stub and bails out (not \#UD, a VERR status). */
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1219
1220
/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    /*
     * Group 7 dispatcher.  Selects on the ModR/M reg field; for register
     * forms (mod == 3) several encodings further select on the rm field.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* sgdt (mem) or VMX instructions (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* sidt (mem) or monitor/mwait (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* lgdt (mem) or xgetbv/xsetbv (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* lidt (mem) or the AMD SVM instructions (reg, all 8 rm values). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* smsw - both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5: /* Unassigned. */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* lmsw - both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* invlpg (mem) or swapgs/rdtscp (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1297
/** Opcode 0x0f 0x00 /3. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    /*
     * Common worker for LAR and LSL (selected by fIsLar).  Fetches a 16-bit
     * selector from a register or memory and hands it to iemCImpl_LarLsl_u16
     * or iemCImpl_LarLsl_u64 depending on operand size (the 32-bit and
     * 64-bit cases share the 64-bit worker).  Invalid in real and V86 mode.
     */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source: the selector is read as a 16-bit value. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1391
1392
1393
/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    /* LAR - thin wrapper over the shared LAR/LSL worker (fIsLar = true). */
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}
1400
1401
/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    /* LSL - thin wrapper over the shared LAR/LSL worker (fIsLar = false). */
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1408
1409
/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    /* Defers entirely to iemCImpl_syscall. */
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1417
1418
/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    /* Defers entirely to iemCImpl_clts (clears CR0.TS there). */
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1426
1427
/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    /* Defers entirely to iemCImpl_sysret. */
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1435
1436
/** Opcode 0x0f 0x08 - invd; still a stub. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();
1440
1441
/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    /*
     * Implemented as a privileged no-op: only the CPL-0 check is performed,
     * no caches are actually written back or invalidated.
     */
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1454
1455
/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    /* UD2 - guaranteed invalid opcode; raises \#UD by design. */
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1462
/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Register forms are invalid; only memory operands are accepted. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Pick the mnemonic from the reg field; most encodings alias /0. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the address (consuming any displacement bytes) but do nothing. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1503
1504
/** Opcode 0x0f 0x0e - femms (3DNow!); still a stub. */
FNIEMOP_STUB(iemOp_femms);
1507
1508
/*
 * 3DNow! operations (0x0f 0x0f with an immediate sub-opcode byte) - all
 * stubbed pending implementation; dispatched from iemOp_3Dnow below.
 */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1580
1581
/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /*
     * 3DNow! dispatcher.  The real sub-opcode byte comes after the ModR/M
     * and displacement bytes; raises \#UD when the guest CPU profile does
     * not advertise 3DNow! or the sub-opcode is unassigned.
     */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1623
1624
/* Opcode 0x0f 0x10 and prefixed variants - all still stubs. */

/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
1633
1634
/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    /*
     * Stores a full 128-bit XMM register to another XMM register or to an
     * (unaligned-allowed) memory location.
     */
    IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1676
1677
/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd; still a stub. */
FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);

/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss; still a stub. */
FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
1683
/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
{
    /*
     * Stores the low 64 bits of an XMM register to another XMM register
     * (low qword only) or to a 64-bit memory location.
     */
    IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1728
1729
/* Opcode 0x0f 0x12/0x13 group - mostly stubs (NEXT = queued for implementation). */

/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT

/** Opcode 0x66 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT

/** Opcode 0xf3 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT

/** Opcode 0xf2 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT

/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
1744
/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    /*
     * Stores the low 64 bits of an XMM register to memory.  The register
     * form is disabled (raises \#UD); the #if 0 block is kept as reference.
     */
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1791
/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/* Opcodes 0x0f 0x14..0x17 - unpack and high/low-half moves; stubs or invalid. */

/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */
1820
1821
/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    /*
     * Group 16 prefetch hints.  Memory forms decode the address and act as
     * NOPs; register forms are invalid.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
1854
1855
/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    /*
     * Multi-byte NOP.  The memory form still decodes the effective address
     * (consuming displacement bytes) but performs no access.
     */
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1881
1882
/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /*
     * Reads a control register into a GPR via iemCImpl_mov_Rd_Cd.  Only
     * CR0/2/3/4/8 are valid; a LOCK prefix selects CR8 on CPUs with the
     * fMovCr8In32Bit feature (AMD alternative encoding).
     */
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
1914
1915
/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    /*
     * Reads a debug register into a GPR via iemCImpl_mov_Rd_Dd.
     * REX.R is invalid here (there are no DR8..DR15).
     */
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1929
1930
/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /*
     * Writes a GPR into a control register via iemCImpl_mov_Cd_Rd.  Mirrors
     * iemOp_mov_Rd_Cd: only CR0/2/3/4/8 valid, LOCK prefix encodes CR8 when
     * the fMovCr8In32Bit feature is present.
     */
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
1962
1963
/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    /*
     * Writes a GPR into a debug register via iemCImpl_mov_Dd_Rd.
     * REX.R is invalid here (there are no DR8..DR15).
     */
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
1977
1978
/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    /* Test-register moves; invalid on the CPUs we target. */
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1987
1988
/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    /* Test-register moves; invalid on the CPUs we target. */
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1997
1998
/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
{
    /*
     * Loads a full 128-bit value into an XMM register from another XMM
     * register or from memory; the memory form enforces 16-byte alignment
     * (IEM_MC_FETCH_MEM_U128_ALIGN_SSE).
     */
    IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2040
/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
{
    /* MOVAPD xmm, xmm/m128: identical structure to the movaps handler above,
       except SSE2 availability is checked instead of SSE. */
    IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2082
2083/* Opcode 0xf3 0x0f 0x28 - invalid */
2084/* Opcode 0xf2 0x0f 0x28 - invalid */
2085
/** Opcode 0x0f 0x29. */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
{
    /* Handles both movaps (no prefix) and movapd (0x66 prefix) in the store
       direction: XMM register -> r/m operand.  Only the CPU-feature exception
       check differs between the two forms. */
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
        IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
    else
        IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Store direction: the r/m register is the destination here. */
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc); /* note: effective address of the memory *destination* despite the name */

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2136
2137
/* 0x0f 0x2a group: integer -> floating-point conversions; not implemented yet. */
/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
2146
2147
/** Opcode 0x0f 0x2b. */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
{
    /* MOVNTPS/MOVNTPD: non-temporal 128-bit store, XMM -> memory.  The
       non-temporal hint is not modelled; this is emulated as a plain aligned
       store.  Only the memory form is valid. */
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
        IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
    else
        IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc); /* note: address of the memory *destination* despite the name */

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
2184
2185
/* 0x0f 0x2c..0x2f: float -> int conversions and scalar compares; stubs. */
/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */
2217
/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    /* WRMSR - deferred entirely to the C implementation. */
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    /* RDTSC - deferred entirely to the C implementation. */
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}


/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    /* RDMSR (0x0f 0x32; the old comment said 0x33, which is rdpmc) -
       deferred entirely to the C implementation. */
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
2243
2244
/** Opcode 0x0f 0x33. */
FNIEMOP_STUB(iemOp_rdpmc);         /* rdpmc is 0x33; the old comment said 0x34. */
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2257
2258
/**
 * Implements a conditional move (CMOVcc Gv,Ev).
 *
 * Notes on the generated microcode:
 *  - The memory operand is always fetched, whether or not the condition
 *    holds (the fetch precedes the a_Cnd test below).
 *  - In 32-bit operand size the high half of the destination register is
 *    cleared even when the condition is false (see the IEM_MC_ELSE paths),
 *    matching the register-write behaviour of 32-bit operations in 64-bit
 *    mode.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
2359
2360
2361
/** Opcode 0x0f 0x40 - cmovo Gv,Ev (move if OF=1). */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41 - cmovno Gv,Ev (move if OF=0). */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42 - cmovc Gv,Ev (move if CF=1). */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43 - cmovnc Gv,Ev (move if CF=0). */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44 - cmove Gv,Ev (move if ZF=1). */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45 - cmovne Gv,Ev (move if ZF=0). */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46 - cmovbe Gv,Ev (move if CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47 - cmovnbe Gv,Ev (move if CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48 - cmovs Gv,Ev (move if SF=1). */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49 - cmovns Gv,Ev (move if SF=0). */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a - cmovp Gv,Ev (move if PF=1). */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b - cmovnp Gv,Ev (move if PF=0). */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c - cmovl Gv,Ev (move if SF!=OF). */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d - cmovnl Gv,Ev (move if SF==OF). */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e - cmovle Gv,Ev (move if ZF=1 or SF!=OF). */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f - cmovnle Gv,Ev (move if ZF=0 and SF==OF). */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X
2490
/* 0x0f 0x50..0x5f: SSE/SSE2 packed/scalar FP arithmetic, logic, and
   conversions - all stubs at present. */
/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
/* Opcode 0xf3 0x0f 0x50 - invalid */
/* Opcode 0xf2 0x0f 0x50 - invalid */

/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
/* Opcode 0x66 0x0f 0x52 - invalid */
/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
/* Opcode 0xf2 0x0f 0x52 - invalid */

/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
/* Opcode 0xf2 0x0f 0x53 - invalid */

/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */

/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */

/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */

/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x57 - invalid */
/* Opcode 0xf2 0x0f 0x57 - invalid */

/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);

/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
/* Opcode 0xf2 0x0f 0x5b - invalid */

/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2619
2620/**
2621 * Common worker for MMX instructions on the forms:
2622 * pxxxx mm1, mm2/mem32
2623 *
2624 * The 2nd operand is the first half of a register, which in the memory case
2625 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
2626 * memory accessed for MMX.
2627 *
2628 * Exceptions type 4.
2629 */
2630FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2631{
2632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2633 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2634 {
2635 /*
2636 * Register, register.
2637 */
2638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2639 IEM_MC_BEGIN(2, 0);
2640 IEM_MC_ARG(uint128_t *, pDst, 0);
2641 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2642 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2643 IEM_MC_PREPARE_SSE_USAGE();
2644 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2645 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2646 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2647 IEM_MC_ADVANCE_RIP();
2648 IEM_MC_END();
2649 }
2650 else
2651 {
2652 /*
2653 * Register, memory.
2654 */
2655 IEM_MC_BEGIN(2, 2);
2656 IEM_MC_ARG(uint128_t *, pDst, 0);
2657 IEM_MC_LOCAL(uint64_t, uSrc);
2658 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2660
2661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2663 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2664 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2665
2666 IEM_MC_PREPARE_SSE_USAGE();
2667 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2668 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2669
2670 IEM_MC_ADVANCE_RIP();
2671 IEM_MC_END();
2672 }
2673 return VINF_SUCCESS;
2674}
2675
2676
2677/**
2678 * Common worker for SSE2 instructions on the forms:
2679 * pxxxx xmm1, xmm2/mem128
2680 *
2681 * The 2nd operand is the first half of a register, which in the memory case
2682 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
2683 * memory accessed for MMX.
2684 *
2685 * Exceptions type 4.
2686 */
2687FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2688{
2689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2690 if (!pImpl->pfnU64)
2691 return IEMOP_RAISE_INVALID_OPCODE();
2692 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2693 {
2694 /*
2695 * Register, register.
2696 */
2697 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2698 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2700 IEM_MC_BEGIN(2, 0);
2701 IEM_MC_ARG(uint64_t *, pDst, 0);
2702 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2703 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2704 IEM_MC_PREPARE_FPU_USAGE();
2705 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2706 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2707 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2708 IEM_MC_ADVANCE_RIP();
2709 IEM_MC_END();
2710 }
2711 else
2712 {
2713 /*
2714 * Register, memory.
2715 */
2716 IEM_MC_BEGIN(2, 2);
2717 IEM_MC_ARG(uint64_t *, pDst, 0);
2718 IEM_MC_LOCAL(uint32_t, uSrc);
2719 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2720 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2721
2722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2724 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2725 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2726
2727 IEM_MC_PREPARE_FPU_USAGE();
2728 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2729 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2730
2731 IEM_MC_ADVANCE_RIP();
2732 IEM_MC_END();
2733 }
2734 return VINF_SUCCESS;
2735}
2736
2737
2738/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2739FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2740{
2741 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2742 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2743}
2744
2745/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, W */
2746FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2747{
2748 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2749 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2750}
2751
2752/* Opcode 0xf3 0x0f 0x60 - invalid */
2753
2754
2755/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2756FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2757{
2758 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2759 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2760}
2761
2762/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2763FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2764{
2765 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2766 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2767}
2768
2769/* Opcode 0xf3 0x0f 0x61 - invalid */
2770
2771
2772/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2773FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2774{
2775 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2776 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2777}
2778
2779/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2780FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2781{
2782 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2783 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2784}
2785
2786/* Opcode 0xf3 0x0f 0x62 - invalid */
2787
2788
2789
/* 0x0f 0x63..0x67: MMX/SSE pack and packed-compare instructions; stubs. */
/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x63 - invalid */

/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x64 - invalid */

/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x65 - invalid */

/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x66 - invalid */

/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0x67 - invalid */
2819
2820
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Tables without an MMX variant decode as #UD. */
    AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  pDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2879
2880
/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 128-bit aligned access where the implementation may read the full
 * 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint128_t *,         pDst, 0);
        IEM_MC_ARG(uint128_t const *,   pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint128_t *,                 pDst,       0);
        IEM_MC_LOCAL(uint128_t,                 uSrc);
        IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2936
2937
/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
{
    /* MMX form; defers to the common high-to-full unpack worker. */
    IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2944
/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
{
    /* SSE form (0x66 prefix); uses the same assembly helper as the MMX variant. */
    IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2951/* Opcode 0xf3 0x0f 0x68 - invalid */
2952
2953
/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
{
    /* MMX form; defers to the common high-to-full unpack worker. */
    IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2960
/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
{
    /* SSE form (0x66 prefix); uses the same assembly helper as the MMX variant. */
    IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);

}
2968/* Opcode 0xf3 0x0f 0x69 - invalid */
2969
2970
/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
{
    /* MMX form; defers to the common high-to-full unpack worker. */
    IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2977
/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
{
    /* SSE form (0x66 prefix); uses the same assembly helper as the MMX variant. */
    IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2984/* Opcode 0xf3 0x0f 0x6a - invalid */
2985
2986
/* 0x0f 0x6b packssdw - not implemented yet, declared as stubs. */
/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x6b - invalid */
2992
2993
2994/* Opcode 0x0f 0x6c - invalid */
2995
/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
{
    /* SSE-only instruction (no MMX form); defers to the low-to-full unpack worker. */
    IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
3002
3003/* Opcode 0xf3 0x0f 0x6c - invalid */
3004/* Opcode 0xf2 0x0f 0x6c - invalid */
3005
3006
3007/* Opcode 0x0f 0x6d - invalid */
3008
3009/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
3010FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3011{
3012 IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq");
3013 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3014}
3015
3016/* Opcode 0xf3 0x0f 0x6d - invalid */
3017
3018
/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    /* Loads an MMX register from a GPR or memory; REX.W selects the 64-bit
       (movq) form, otherwise the 32-bit value is zero-extended (movd). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
    else
        IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* MMX, greg */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        else
            IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* MMX, [mem] */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        }
        else
        {
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
        }
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3069
3070/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
3071FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3072{
3073 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3074 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3075 IEMOP_MNEMONIC(vmovdq_Wq_Eq, "vmovq Wq,Eq");
3076 else
3077 IEMOP_MNEMONIC(vmovdq_Wd_Ed, "vmovd Wd,Ed");
3078 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3079 {
3080 /* XMM, greg*/
3081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3082 IEM_MC_BEGIN(0, 1);
3083 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3084 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3085 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3086 {
3087 IEM_MC_LOCAL(uint64_t, u64Tmp);
3088 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3089 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3090 }
3091 else
3092 {
3093 IEM_MC_LOCAL(uint32_t, u32Tmp);
3094 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3095 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3096 }
3097 IEM_MC_ADVANCE_RIP();
3098 IEM_MC_END();
3099 }
3100 else
3101 {
3102 /* XMM, [mem] */
3103 IEM_MC_BEGIN(0, 2);
3104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3105 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
3106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3108 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3109 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3110 {
3111 IEM_MC_LOCAL(uint64_t, u64Tmp);
3112 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3113 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3114 }
3115 else
3116 {
3117 IEM_MC_LOCAL(uint32_t, u32Tmp);
3118 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3119 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3120 }
3121 IEM_MC_ADVANCE_RIP();
3122 IEM_MC_END();
3123 }
3124 return VINF_SUCCESS;
3125}
3126
3127/* Opcode 0xf3 0x0f 0x6e - invalid */
3128
3129
/** Opcode 0x0f 0x6f.
 * Dispatches on the mandatory prefix: 0x66 = movdqa (aligned 128-bit load),
 * 0xf3 = movdqu (unaligned 128-bit load), no prefix = MMX movq. */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                      (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                /* Only movdqa enforces 16-byte alignment on the memory operand. */
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3227
3228
/** Opcode 0x0f 0x70. The immediate here is evil!
 * The shuffle-control immediate follows the mod r/m bytes (and any SIB/disp),
 * so in the memory forms it must be fetched after the effective address is
 * decoded - hence the mid-function IEM_OPCODE_GET_NEXT_U8 calls below. */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* The three SSE variants share decode logic; pick the worker here. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC(pshufd_Vdq_Wdq, "pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC(pshuflw_Vdq_Wdq, "pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC(pshufhw_Vdq_Wdq, "pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate comes after the memory operand encoding. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate comes after the memory operand encoding. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3357
3358
/* Group 12 (0x0f 0x71) shift-by-immediate workers - still stubs. */
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
3376
3377
3378/** Opcode 0x0f 0x71. */
3379FNIEMOP_DEF(iemOp_Grp12)
3380{
3381 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3382 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3383 return IEMOP_RAISE_INVALID_OPCODE();
3384 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3385 {
3386 case 0: case 1: case 3: case 5: case 7:
3387 return IEMOP_RAISE_INVALID_OPCODE();
3388 case 2:
3389 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3390 {
3391 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
3392 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3393 default: return IEMOP_RAISE_INVALID_OPCODE();
3394 }
3395 case 4:
3396 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3397 {
3398 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3399 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3400 default: return IEMOP_RAISE_INVALID_OPCODE();
3401 }
3402 case 6:
3403 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3404 {
3405 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3406 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3407 default: return IEMOP_RAISE_INVALID_OPCODE();
3408 }
3409 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3410 }
3411}
3412
3413
/* Group 13 (0x0f 0x72) shift-by-immediate workers - still stubs. */
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3431
3432
3433/** Opcode 0x0f 0x72. */
3434FNIEMOP_DEF(iemOp_Grp13)
3435{
3436 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3437 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3438 return IEMOP_RAISE_INVALID_OPCODE();
3439 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3440 {
3441 case 0: case 1: case 3: case 5: case 7:
3442 return IEMOP_RAISE_INVALID_OPCODE();
3443 case 2:
3444 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3445 {
3446 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3447 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3448 default: return IEMOP_RAISE_INVALID_OPCODE();
3449 }
3450 case 4:
3451 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3452 {
3453 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3454 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3455 default: return IEMOP_RAISE_INVALID_OPCODE();
3456 }
3457 case 6:
3458 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3459 {
3460 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3461 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3462 default: return IEMOP_RAISE_INVALID_OPCODE();
3463 }
3464 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3465 }
3466}
3467
3468
/* Group 14 (0x0f 0x73) shift-by-immediate workers - still stubs. */
/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3486
3487
3488/** Opcode 0x0f 0x73. */
3489FNIEMOP_DEF(iemOp_Grp14)
3490{
3491 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3492 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3493 return IEMOP_RAISE_INVALID_OPCODE();
3494 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3495 {
3496 case 0: case 1: case 4: case 5:
3497 return IEMOP_RAISE_INVALID_OPCODE();
3498 case 2:
3499 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3500 {
3501 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3502 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3503 default: return IEMOP_RAISE_INVALID_OPCODE();
3504 }
3505 case 3:
3506 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3507 {
3508 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3509 default: return IEMOP_RAISE_INVALID_OPCODE();
3510 }
3511 case 6:
3512 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3513 {
3514 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3515 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3516 default: return IEMOP_RAISE_INVALID_OPCODE();
3517 }
3518 case 7:
3519 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3520 {
3521 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3522 default: return IEMOP_RAISE_INVALID_OPCODE();
3523 }
3524 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3525 }
3526}
3527
3528
3529/**
3530 * Common worker for SSE2 and MMX instructions on the forms:
3531 * pxxx mm1, mm2/mem64
3532 * pxxx xmm1, xmm2/mem128
3533 *
3534 * Proper alignment of the 128-bit operand is enforced.
3535 * Exceptions type 4. SSE2 and MMX cpuid checks.
3536 */
3537FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3538{
3539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3540 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3541 {
3542 case IEM_OP_PRF_SIZE_OP: /* SSE */
3543 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3544 {
3545 /*
3546 * Register, register.
3547 */
3548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3549 IEM_MC_BEGIN(2, 0);
3550 IEM_MC_ARG(uint128_t *, pDst, 0);
3551 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3552 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3553 IEM_MC_PREPARE_SSE_USAGE();
3554 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3555 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3556 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3557 IEM_MC_ADVANCE_RIP();
3558 IEM_MC_END();
3559 }
3560 else
3561 {
3562 /*
3563 * Register, memory.
3564 */
3565 IEM_MC_BEGIN(2, 2);
3566 IEM_MC_ARG(uint128_t *, pDst, 0);
3567 IEM_MC_LOCAL(uint128_t, uSrc);
3568 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3570
3571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3573 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3574 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3575
3576 IEM_MC_PREPARE_SSE_USAGE();
3577 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3578 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3579
3580 IEM_MC_ADVANCE_RIP();
3581 IEM_MC_END();
3582 }
3583 return VINF_SUCCESS;
3584
3585 case 0: /* MMX */
3586 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3587 {
3588 /*
3589 * Register, register.
3590 */
3591 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3592 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3594 IEM_MC_BEGIN(2, 0);
3595 IEM_MC_ARG(uint64_t *, pDst, 0);
3596 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3597 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3598 IEM_MC_PREPARE_FPU_USAGE();
3599 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3600 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3601 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3602 IEM_MC_ADVANCE_RIP();
3603 IEM_MC_END();
3604 }
3605 else
3606 {
3607 /*
3608 * Register, memory.
3609 */
3610 IEM_MC_BEGIN(2, 2);
3611 IEM_MC_ARG(uint64_t *, pDst, 0);
3612 IEM_MC_LOCAL(uint64_t, uSrc);
3613 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3615
3616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3618 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3619 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3620
3621 IEM_MC_PREPARE_FPU_USAGE();
3622 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3623 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3624
3625 IEM_MC_ADVANCE_RIP();
3626 IEM_MC_END();
3627 }
3628 return VINF_SUCCESS;
3629
3630 default:
3631 return IEMOP_RAISE_INVALID_OPCODE();
3632 }
3633}
3634
3635
/** Opcode 0x0f 0x74 - pcmpeqb (MMX and, with 0x66 prefix, SSE form). */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    /* Prefix-based MMX/SSE dispatch happens in the common worker. */
    IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
3642
3643
/** Opcode 0x0f 0x75 - pcmpeqw (MMX and, with 0x66 prefix, SSE form). */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    /* Prefix-based MMX/SSE dispatch happens in the common worker. */
    IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
3650
3651
/** Opcode 0x0f 0x76 - pcmpeqd (MMX and, with 0x66 prefix, SSE form).
 * NOTE(review): the symbol name is missing a 'q' ("pcmped"); renaming would
 * require touching the opcode dispatch table elsewhere - left as-is. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    /* Prefix-based MMX/SSE dispatch happens in the common worker. */
    IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
3658
3659
/* Opcodes 0x0f 0x77 .. 0x0f 0x7d: stubs and invalid encodings. */
/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
/* Opcode 0x66 0x0f 0x77 - invalid */
/* Opcode 0xf3 0x0f 0x77 - invalid */
/* Opcode 0xf2 0x0f 0x77 - invalid */

/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17);
/* Opcode 0xf3 0x0f 0x78 - invalid */
/* Opcode 0xf2 0x0f 0x78 - invalid */

/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
/* Opcode 0x66 0x0f 0x79 - invalid */
/* Opcode 0xf3 0x0f 0x79 - invalid */
/* Opcode 0xf2 0x0f 0x79 - invalid */

/* Opcode 0x0f 0x7a - invalid */
/* Opcode 0x66 0x0f 0x7a - invalid */
/* Opcode 0xf3 0x0f 0x7a - invalid */
/* Opcode 0xf2 0x0f 0x7a - invalid */

/* Opcode 0x0f 0x7b - invalid */
/* Opcode 0x66 0x0f 0x7b - invalid */
/* Opcode 0xf3 0x0f 0x7b - invalid */
/* Opcode 0xf2 0x0f 0x7b - invalid */

/* Opcode 0x0f 0x7c - invalid */
/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x7c - invalid */
/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);

/* Opcode 0x0f 0x7d - invalid */
/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x7d - invalid */
/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3702
3703
/** Opcode 0x0f 0x7e.
 * Stores an MMX (no prefix) or XMM (0x66 prefix) register's low 32/64 bits
 * to a GPR or memory; REX.W selects the 64-bit (movq) form. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
            else
                IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
            else
                IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3820
3821
/** Opcode 0x0f 0x7f.
 * Store direction counterpart of 0x0f 0x6f: 0x66 = movdqa (aligned store),
 * 0xf3 = movdqu (unaligned store), no prefix = MMX movq store. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /* Note: destination is r/m here (store direction), reg is the source. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                                      ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                /* Only movdqa enforces 16-byte alignment on the memory operand. */
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3922
3923
3924
/**
 * Opcode 0x0f 0x80.
 * jo Jv: near jump taken when the overflow flag (OF) is set.
 */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc Jv requires 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit mode defaults to 64-bit operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); /* signed 16-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); /* signed 32-bit displacement (also for 64-bit opsize) */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3959
3960
/**
 * Opcode 0x0f 0x81.
 * jno Jv: near jump taken when OF is clear (inverted condition, so the
 * branch bodies are swapped relative to jo).
 */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc Jv requires 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit mode defaults to 64-bit operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); /* signed 16-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); /* signed 32-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3995
3996
/**
 * Opcode 0x0f 0x82.
 * jc/jb/jnae Jv: near jump taken when the carry flag (CF) is set.
 */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc Jv requires 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit mode defaults to 64-bit operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); /* signed 16-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); /* signed 32-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4031
4032
/**
 * Opcode 0x0f 0x83.
 * jnc/jnb/jae Jv: near jump taken when CF is clear (inverted condition,
 * so the branch bodies are swapped relative to jc).
 */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc Jv requires 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit mode defaults to 64-bit operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); /* signed 16-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); /* signed 32-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4067
4068
/**
 * Opcode 0x0f 0x84.
 * je/jz Jv: near jump taken when the zero flag (ZF) is set.
 */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc Jv requires 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit mode defaults to 64-bit operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); /* signed 16-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); /* signed 32-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4103
4104
/**
 * Opcode 0x0f 0x85.
 * jne/jnz Jv: near jump taken when ZF is clear (inverted condition, so
 * the branch bodies are swapped relative to je).
 */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc Jv requires 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit mode defaults to 64-bit operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); /* signed 16-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); /* signed 32-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4139
4140
/**
 * Opcode 0x0f 0x86.
 * jbe/jna Jv: near jump taken when CF or ZF (or both) is set.
 */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc Jv requires 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit mode defaults to 64-bit operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); /* signed 16-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); /* signed 32-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4175
4176
/**
 * Opcode 0x0f 0x87.
 * jnbe/ja Jv: near jump taken when both CF and ZF are clear (inverted
 * condition, so the branch bodies are swapped relative to jbe).
 */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc Jv requires 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit mode defaults to 64-bit operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); /* signed 16-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); /* signed 32-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4211
4212
/**
 * Opcode 0x0f 0x88.
 * js Jv: near jump taken when the sign flag (SF) is set.
 */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc Jv requires 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit mode defaults to 64-bit operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); /* signed 16-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); /* signed 32-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4247
4248
/**
 * Opcode 0x0f 0x89.
 * jns Jv: near jump taken when SF is clear (inverted condition, so the
 * branch bodies are swapped relative to js).
 */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc Jv requires 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit mode defaults to 64-bit operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); /* signed 16-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); /* signed 32-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4283
4284
/**
 * Opcode 0x0f 0x8a.
 * jp Jv: near jump taken when the parity flag (PF) is set.
 */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc Jv requires 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit mode defaults to 64-bit operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); /* signed 16-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); /* signed 32-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4319
4320
/**
 * Opcode 0x0f 0x8b.
 * jnp Jv: near jump taken when PF is clear (inverted condition, so the
 * branch bodies are swapped relative to jp).
 */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc Jv requires 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit mode defaults to 64-bit operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); /* signed 16-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); /* signed 32-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4355
4356
/**
 * Opcode 0x0f 0x8c.
 * jl/jnge Jv: near jump taken when SF != OF (signed less-than).
 */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc Jv requires 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit mode defaults to 64-bit operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); /* signed 16-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); /* signed 32-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4391
4392
/**
 * Opcode 0x0f 0x8d.
 * jnl/jge Jv: near jump taken when SF == OF (signed greater-or-equal;
 * inverted condition, so the branch bodies are swapped relative to jl).
 */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc Jv requires 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit mode defaults to 64-bit operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); /* signed 16-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); /* signed 32-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4427
4428
/**
 * Opcode 0x0f 0x8e.
 * jle/jng Jv: near jump taken when ZF is set or SF != OF (signed
 * less-or-equal).
 */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc Jv requires 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit mode defaults to 64-bit operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); /* signed 16-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); /* signed 32-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4463
4464
/**
 * Opcode 0x0f 0x8f.
 * jnle/jg Jv: near jump taken when ZF is clear and SF == OF (signed
 * greater-than; inverted condition, so the branch bodies are swapped
 * relative to jle).
 */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc Jv requires 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit mode defaults to 64-bit operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); /* signed 16-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); /* signed 32-bit displacement */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4499
4500
/**
 * Opcode 0x0f 0x90.
 * seto Eb: store 1 to the byte operand when OF is set, else 0.
 */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();                /* Setcc requires 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is decoded before the lock-prefix check. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4541
4542
/**
 * Opcode 0x0f 0x91.
 * setno Eb: store 1 when OF is clear, else 0 (inverted store values
 * relative to seto).
 */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();                /* Setcc requires 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is decoded before the lock-prefix check. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4583
4584
/**
 * Opcode 0x0f 0x92.
 * setc Eb: store 1 to the byte operand when CF is set, else 0.
 */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();                /* Setcc requires 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is decoded before the lock-prefix check. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4625
4626
/**
 * Opcode 0x0f 0x93.
 * setnc Eb: store 1 when CF is clear, else 0 (inverted store values
 * relative to setc).
 */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();                /* Setcc requires 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is decoded before the lock-prefix check. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4667
4668
/**
 * Opcode 0x0f 0x94.
 * sete Eb: store 1 to the byte operand when ZF is set, else 0.
 */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();                /* Setcc requires 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is decoded before the lock-prefix check. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4709
4710
/**
 * Opcode 0x0f 0x95.
 * setne Eb: store 1 when ZF is clear, else 0 (inverted store values
 * relative to sete).
 */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();                /* Setcc requires 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is decoded before the lock-prefix check. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4751
4752
/**
 * Opcode 0x0f 0x96.
 * setbe Eb: store 1 when CF or ZF (or both) is set, else 0.
 */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();                /* Setcc requires 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is decoded before the lock-prefix check. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4793
4794
/**
 * Opcode 0x0f 0x97.
 * setnbe Eb: store 1 when both CF and ZF are clear, else 0 (inverted
 * store values relative to setbe).
 */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();                /* Setcc requires 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is decoded before the lock-prefix check. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4835
4836
/**
 * Opcode 0x0f 0x98.
 * sets Eb: store 1 to the byte operand when SF is set, else 0.
 */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();                /* Setcc requires 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is decoded before the lock-prefix check. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4877
4878
/**
 * Opcode 0x0f 0x99.
 * setns Eb: store 1 when SF is clear, else 0 (inverted store values
 * relative to sets).
 */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();                /* Setcc requires 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is decoded before the lock-prefix check. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4919
4920
/**
 * Opcode 0x0f 0x9a.
 * setp Eb: store 1 to the byte operand when PF is set, else 0.
 */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();                /* Setcc requires 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is decoded before the lock-prefix check. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4961
4962
/**
 * Opcode 0x0f 0x9b.
 * setnp Eb: store 1 when PF is clear, else 0 (inverted store values
 * relative to setp).
 */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();                /* Setcc requires 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - effective address is decoded before the lock-prefix check. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5003
5004
5005/** Opcode 0x0f 0x9c. */
5006FNIEMOP_DEF(iemOp_setl_Eb)
5007{
5008 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5009 IEMOP_HLP_MIN_386();
5010 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5011
5012 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5013 * any way. AMD says it's "unused", whatever that means. We're
5014 * ignoring for now. */
5015 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5016 {
5017 /* register target */
5018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5019 IEM_MC_BEGIN(0, 0);
5020 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5021 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5022 } IEM_MC_ELSE() {
5023 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5024 } IEM_MC_ENDIF();
5025 IEM_MC_ADVANCE_RIP();
5026 IEM_MC_END();
5027 }
5028 else
5029 {
5030 /* memory target */
5031 IEM_MC_BEGIN(0, 1);
5032 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5033 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5035 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5036 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5037 } IEM_MC_ELSE() {
5038 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5039 } IEM_MC_ENDIF();
5040 IEM_MC_ADVANCE_RIP();
5041 IEM_MC_END();
5042 }
5043 return VINF_SUCCESS;
5044}
5045
5046
5047/** Opcode 0x0f 0x9d. */
5048FNIEMOP_DEF(iemOp_setnl_Eb)
5049{
5050 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5051 IEMOP_HLP_MIN_386();
5052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5053
5054 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5055 * any way. AMD says it's "unused", whatever that means. We're
5056 * ignoring for now. */
5057 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5058 {
5059 /* register target */
5060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5061 IEM_MC_BEGIN(0, 0);
5062 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5063 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5064 } IEM_MC_ELSE() {
5065 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5066 } IEM_MC_ENDIF();
5067 IEM_MC_ADVANCE_RIP();
5068 IEM_MC_END();
5069 }
5070 else
5071 {
5072 /* memory target */
5073 IEM_MC_BEGIN(0, 1);
5074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5075 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5077 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5078 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5079 } IEM_MC_ELSE() {
5080 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5081 } IEM_MC_ENDIF();
5082 IEM_MC_ADVANCE_RIP();
5083 IEM_MC_END();
5084 }
5085 return VINF_SUCCESS;
5086}
5087
5088
5089/** Opcode 0x0f 0x9e. */
5090FNIEMOP_DEF(iemOp_setle_Eb)
5091{
5092 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5093 IEMOP_HLP_MIN_386();
5094 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5095
5096 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5097 * any way. AMD says it's "unused", whatever that means. We're
5098 * ignoring for now. */
5099 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5100 {
5101 /* register target */
5102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5103 IEM_MC_BEGIN(0, 0);
5104 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5105 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5106 } IEM_MC_ELSE() {
5107 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5108 } IEM_MC_ENDIF();
5109 IEM_MC_ADVANCE_RIP();
5110 IEM_MC_END();
5111 }
5112 else
5113 {
5114 /* memory target */
5115 IEM_MC_BEGIN(0, 1);
5116 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5117 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5119 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5120 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5121 } IEM_MC_ELSE() {
5122 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5123 } IEM_MC_ENDIF();
5124 IEM_MC_ADVANCE_RIP();
5125 IEM_MC_END();
5126 }
5127 return VINF_SUCCESS;
5128}
5129
5130
5131/** Opcode 0x0f 0x9f. */
5132FNIEMOP_DEF(iemOp_setnle_Eb)
5133{
5134 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5135 IEMOP_HLP_MIN_386();
5136 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5137
5138 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5139 * any way. AMD says it's "unused", whatever that means. We're
5140 * ignoring for now. */
5141 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5142 {
5143 /* register target */
5144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5145 IEM_MC_BEGIN(0, 0);
5146 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5147 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5148 } IEM_MC_ELSE() {
5149 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5150 } IEM_MC_ENDIF();
5151 IEM_MC_ADVANCE_RIP();
5152 IEM_MC_END();
5153 }
5154 else
5155 {
5156 /* memory target */
5157 IEM_MC_BEGIN(0, 1);
5158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5161 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5162 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5163 } IEM_MC_ELSE() {
5164 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5165 } IEM_MC_ENDIF();
5166 IEM_MC_ADVANCE_RIP();
5167 IEM_MC_END();
5168 }
5169 return VINF_SUCCESS;
5170}
5171
5172
5173/**
5174 * Common 'push segment-register' helper.
5175 */
5176FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5177{
5178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5179 if (iReg < X86_SREG_FS)
5180 IEMOP_HLP_NO_64BIT();
5181 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5182
5183 switch (pVCpu->iem.s.enmEffOpSize)
5184 {
5185 case IEMMODE_16BIT:
5186 IEM_MC_BEGIN(0, 1);
5187 IEM_MC_LOCAL(uint16_t, u16Value);
5188 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5189 IEM_MC_PUSH_U16(u16Value);
5190 IEM_MC_ADVANCE_RIP();
5191 IEM_MC_END();
5192 break;
5193
5194 case IEMMODE_32BIT:
5195 IEM_MC_BEGIN(0, 1);
5196 IEM_MC_LOCAL(uint32_t, u32Value);
5197 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5198 IEM_MC_PUSH_U32_SREG(u32Value);
5199 IEM_MC_ADVANCE_RIP();
5200 IEM_MC_END();
5201 break;
5202
5203 case IEMMODE_64BIT:
5204 IEM_MC_BEGIN(0, 1);
5205 IEM_MC_LOCAL(uint64_t, u64Value);
5206 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5207 IEM_MC_PUSH_U64(u64Value);
5208 IEM_MC_ADVANCE_RIP();
5209 IEM_MC_END();
5210 break;
5211 }
5212
5213 return VINF_SUCCESS;
5214}
5215
5216
5217/** Opcode 0x0f 0xa0. */
5218FNIEMOP_DEF(iemOp_push_fs)
5219{
5220 IEMOP_MNEMONIC(push_fs, "push fs");
5221 IEMOP_HLP_MIN_386();
5222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5223 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5224}
5225
5226
5227/** Opcode 0x0f 0xa1. */
5228FNIEMOP_DEF(iemOp_pop_fs)
5229{
5230 IEMOP_MNEMONIC(pop_fs, "pop fs");
5231 IEMOP_HLP_MIN_386();
5232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5233 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5234}
5235
5236
5237/** Opcode 0x0f 0xa2. */
5238FNIEMOP_DEF(iemOp_cpuid)
5239{
5240 IEMOP_MNEMONIC(cpuid, "cpuid");
5241 IEMOP_HLP_MIN_486(); /* not all 486es. */
5242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5243 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5244}
5245
5246
5247/**
5248 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5249 * iemOp_bts_Ev_Gv.
5250 */
5251FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5252{
5253 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5254 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5255
5256 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5257 {
5258 /* register destination. */
5259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5260 switch (pVCpu->iem.s.enmEffOpSize)
5261 {
5262 case IEMMODE_16BIT:
5263 IEM_MC_BEGIN(3, 0);
5264 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5265 IEM_MC_ARG(uint16_t, u16Src, 1);
5266 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5267
5268 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5269 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5270 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5271 IEM_MC_REF_EFLAGS(pEFlags);
5272 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5273
5274 IEM_MC_ADVANCE_RIP();
5275 IEM_MC_END();
5276 return VINF_SUCCESS;
5277
5278 case IEMMODE_32BIT:
5279 IEM_MC_BEGIN(3, 0);
5280 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5281 IEM_MC_ARG(uint32_t, u32Src, 1);
5282 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5283
5284 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5285 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5286 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5287 IEM_MC_REF_EFLAGS(pEFlags);
5288 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5289
5290 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5291 IEM_MC_ADVANCE_RIP();
5292 IEM_MC_END();
5293 return VINF_SUCCESS;
5294
5295 case IEMMODE_64BIT:
5296 IEM_MC_BEGIN(3, 0);
5297 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5298 IEM_MC_ARG(uint64_t, u64Src, 1);
5299 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5300
5301 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5302 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5303 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5304 IEM_MC_REF_EFLAGS(pEFlags);
5305 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5306
5307 IEM_MC_ADVANCE_RIP();
5308 IEM_MC_END();
5309 return VINF_SUCCESS;
5310
5311 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5312 }
5313 }
5314 else
5315 {
5316 /* memory destination. */
5317
5318 uint32_t fAccess;
5319 if (pImpl->pfnLockedU16)
5320 fAccess = IEM_ACCESS_DATA_RW;
5321 else /* BT */
5322 fAccess = IEM_ACCESS_DATA_R;
5323
5324 /** @todo test negative bit offsets! */
5325 switch (pVCpu->iem.s.enmEffOpSize)
5326 {
5327 case IEMMODE_16BIT:
5328 IEM_MC_BEGIN(3, 2);
5329 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5330 IEM_MC_ARG(uint16_t, u16Src, 1);
5331 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5332 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5333 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5334
5335 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5336 if (pImpl->pfnLockedU16)
5337 IEMOP_HLP_DONE_DECODING();
5338 else
5339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5340 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5341 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5342 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5343 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5344 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5345 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5346 IEM_MC_FETCH_EFLAGS(EFlags);
5347
5348 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5349 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5350 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5351 else
5352 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5353 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5354
5355 IEM_MC_COMMIT_EFLAGS(EFlags);
5356 IEM_MC_ADVANCE_RIP();
5357 IEM_MC_END();
5358 return VINF_SUCCESS;
5359
5360 case IEMMODE_32BIT:
5361 IEM_MC_BEGIN(3, 2);
5362 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5363 IEM_MC_ARG(uint32_t, u32Src, 1);
5364 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5366 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5367
5368 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5369 if (pImpl->pfnLockedU16)
5370 IEMOP_HLP_DONE_DECODING();
5371 else
5372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5373 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5374 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5375 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5376 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5377 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5378 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5379 IEM_MC_FETCH_EFLAGS(EFlags);
5380
5381 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5382 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5383 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5384 else
5385 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5386 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5387
5388 IEM_MC_COMMIT_EFLAGS(EFlags);
5389 IEM_MC_ADVANCE_RIP();
5390 IEM_MC_END();
5391 return VINF_SUCCESS;
5392
5393 case IEMMODE_64BIT:
5394 IEM_MC_BEGIN(3, 2);
5395 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5396 IEM_MC_ARG(uint64_t, u64Src, 1);
5397 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5399 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5400
5401 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5402 if (pImpl->pfnLockedU16)
5403 IEMOP_HLP_DONE_DECODING();
5404 else
5405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5406 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5407 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5408 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5409 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5410 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5411 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5412 IEM_MC_FETCH_EFLAGS(EFlags);
5413
5414 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5415 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5416 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5417 else
5418 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5419 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5420
5421 IEM_MC_COMMIT_EFLAGS(EFlags);
5422 IEM_MC_ADVANCE_RIP();
5423 IEM_MC_END();
5424 return VINF_SUCCESS;
5425
5426 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5427 }
5428 }
5429}
5430
5431
5432/** Opcode 0x0f 0xa3. */
5433FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5434{
5435 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5436 IEMOP_HLP_MIN_386();
5437 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5438}
5439
5440
5441/**
5442 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5443 */
5444FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5445{
5446 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5447 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5448
5449 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5450 {
5451 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5453
5454 switch (pVCpu->iem.s.enmEffOpSize)
5455 {
5456 case IEMMODE_16BIT:
5457 IEM_MC_BEGIN(4, 0);
5458 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5459 IEM_MC_ARG(uint16_t, u16Src, 1);
5460 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5461 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5462
5463 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5464 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5465 IEM_MC_REF_EFLAGS(pEFlags);
5466 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5467
5468 IEM_MC_ADVANCE_RIP();
5469 IEM_MC_END();
5470 return VINF_SUCCESS;
5471
5472 case IEMMODE_32BIT:
5473 IEM_MC_BEGIN(4, 0);
5474 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5475 IEM_MC_ARG(uint32_t, u32Src, 1);
5476 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5477 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5478
5479 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5480 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5481 IEM_MC_REF_EFLAGS(pEFlags);
5482 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5483
5484 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5485 IEM_MC_ADVANCE_RIP();
5486 IEM_MC_END();
5487 return VINF_SUCCESS;
5488
5489 case IEMMODE_64BIT:
5490 IEM_MC_BEGIN(4, 0);
5491 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5492 IEM_MC_ARG(uint64_t, u64Src, 1);
5493 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5494 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5495
5496 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5497 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5498 IEM_MC_REF_EFLAGS(pEFlags);
5499 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5500
5501 IEM_MC_ADVANCE_RIP();
5502 IEM_MC_END();
5503 return VINF_SUCCESS;
5504
5505 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5506 }
5507 }
5508 else
5509 {
5510 switch (pVCpu->iem.s.enmEffOpSize)
5511 {
5512 case IEMMODE_16BIT:
5513 IEM_MC_BEGIN(4, 2);
5514 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5515 IEM_MC_ARG(uint16_t, u16Src, 1);
5516 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5517 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5518 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5519
5520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5521 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5522 IEM_MC_ASSIGN(cShiftArg, cShift);
5523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5524 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5525 IEM_MC_FETCH_EFLAGS(EFlags);
5526 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5527 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5528
5529 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5530 IEM_MC_COMMIT_EFLAGS(EFlags);
5531 IEM_MC_ADVANCE_RIP();
5532 IEM_MC_END();
5533 return VINF_SUCCESS;
5534
5535 case IEMMODE_32BIT:
5536 IEM_MC_BEGIN(4, 2);
5537 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5538 IEM_MC_ARG(uint32_t, u32Src, 1);
5539 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5540 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5542
5543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5544 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5545 IEM_MC_ASSIGN(cShiftArg, cShift);
5546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5547 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5548 IEM_MC_FETCH_EFLAGS(EFlags);
5549 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5550 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5551
5552 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5553 IEM_MC_COMMIT_EFLAGS(EFlags);
5554 IEM_MC_ADVANCE_RIP();
5555 IEM_MC_END();
5556 return VINF_SUCCESS;
5557
5558 case IEMMODE_64BIT:
5559 IEM_MC_BEGIN(4, 2);
5560 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5561 IEM_MC_ARG(uint64_t, u64Src, 1);
5562 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5563 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5565
5566 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5567 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5568 IEM_MC_ASSIGN(cShiftArg, cShift);
5569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5570 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5571 IEM_MC_FETCH_EFLAGS(EFlags);
5572 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5573 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5574
5575 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5576 IEM_MC_COMMIT_EFLAGS(EFlags);
5577 IEM_MC_ADVANCE_RIP();
5578 IEM_MC_END();
5579 return VINF_SUCCESS;
5580
5581 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5582 }
5583 }
5584}
5585
5586
5587/**
5588 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5589 */
5590FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5591{
5592 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5593 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5594
5595 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5596 {
5597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5598
5599 switch (pVCpu->iem.s.enmEffOpSize)
5600 {
5601 case IEMMODE_16BIT:
5602 IEM_MC_BEGIN(4, 0);
5603 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5604 IEM_MC_ARG(uint16_t, u16Src, 1);
5605 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5606 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5607
5608 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5609 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5610 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5611 IEM_MC_REF_EFLAGS(pEFlags);
5612 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5613
5614 IEM_MC_ADVANCE_RIP();
5615 IEM_MC_END();
5616 return VINF_SUCCESS;
5617
5618 case IEMMODE_32BIT:
5619 IEM_MC_BEGIN(4, 0);
5620 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5621 IEM_MC_ARG(uint32_t, u32Src, 1);
5622 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5623 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5624
5625 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5626 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5627 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5628 IEM_MC_REF_EFLAGS(pEFlags);
5629 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5630
5631 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5632 IEM_MC_ADVANCE_RIP();
5633 IEM_MC_END();
5634 return VINF_SUCCESS;
5635
5636 case IEMMODE_64BIT:
5637 IEM_MC_BEGIN(4, 0);
5638 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5639 IEM_MC_ARG(uint64_t, u64Src, 1);
5640 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5641 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5642
5643 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5644 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5645 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5646 IEM_MC_REF_EFLAGS(pEFlags);
5647 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5648
5649 IEM_MC_ADVANCE_RIP();
5650 IEM_MC_END();
5651 return VINF_SUCCESS;
5652
5653 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5654 }
5655 }
5656 else
5657 {
5658 switch (pVCpu->iem.s.enmEffOpSize)
5659 {
5660 case IEMMODE_16BIT:
5661 IEM_MC_BEGIN(4, 2);
5662 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5663 IEM_MC_ARG(uint16_t, u16Src, 1);
5664 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5665 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5666 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5667
5668 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5670 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5671 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5672 IEM_MC_FETCH_EFLAGS(EFlags);
5673 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5674 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5675
5676 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5677 IEM_MC_COMMIT_EFLAGS(EFlags);
5678 IEM_MC_ADVANCE_RIP();
5679 IEM_MC_END();
5680 return VINF_SUCCESS;
5681
5682 case IEMMODE_32BIT:
5683 IEM_MC_BEGIN(4, 2);
5684 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5685 IEM_MC_ARG(uint32_t, u32Src, 1);
5686 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5687 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5689
5690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5692 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5693 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5694 IEM_MC_FETCH_EFLAGS(EFlags);
5695 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5696 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5697
5698 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5699 IEM_MC_COMMIT_EFLAGS(EFlags);
5700 IEM_MC_ADVANCE_RIP();
5701 IEM_MC_END();
5702 return VINF_SUCCESS;
5703
5704 case IEMMODE_64BIT:
5705 IEM_MC_BEGIN(4, 2);
5706 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5707 IEM_MC_ARG(uint64_t, u64Src, 1);
5708 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5709 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5711
5712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5714 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5715 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5716 IEM_MC_FETCH_EFLAGS(EFlags);
5717 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5718 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5719
5720 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5721 IEM_MC_COMMIT_EFLAGS(EFlags);
5722 IEM_MC_ADVANCE_RIP();
5723 IEM_MC_END();
5724 return VINF_SUCCESS;
5725
5726 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5727 }
5728 }
5729}
5730
5731
5732
5733/** Opcode 0x0f 0xa4. */
5734FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5735{
5736 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5737 IEMOP_HLP_MIN_386();
5738 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5739}
5740
5741
5742/** Opcode 0x0f 0xa5. */
5743FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5744{
5745 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5746 IEMOP_HLP_MIN_386();
5747 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5748}
5749
5750
5751/** Opcode 0x0f 0xa8. */
5752FNIEMOP_DEF(iemOp_push_gs)
5753{
5754 IEMOP_MNEMONIC(push_gs, "push gs");
5755 IEMOP_HLP_MIN_386();
5756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5757 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5758}
5759
5760
5761/** Opcode 0x0f 0xa9. */
5762FNIEMOP_DEF(iemOp_pop_gs)
5763{
5764 IEMOP_MNEMONIC(pop_gs, "pop gs");
5765 IEMOP_HLP_MIN_386();
5766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5767 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5768}
5769
5770
/** Opcode 0x0f 0xaa - rsm (resume from SMM). Not implemented yet. */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();
5774
5775
5776/** Opcode 0x0f 0xab. */
5777FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5778{
5779 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5780 IEMOP_HLP_MIN_386();
5781 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5782}
5783
5784
5785/** Opcode 0x0f 0xac. */
5786FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5787{
5788 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5789 IEMOP_HLP_MIN_386();
5790 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5791}
5792
5793
5794/** Opcode 0x0f 0xad. */
5795FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5796{
5797 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5798 IEMOP_HLP_MIN_386();
5799 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5800}
5801
5802
5803/** Opcode 0x0f 0xae mem/0. */
5804FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5805{
5806 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5807 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5808 return IEMOP_RAISE_INVALID_OPCODE();
5809
5810 IEM_MC_BEGIN(3, 1);
5811 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5812 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5813 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5816 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5817 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5818 IEM_MC_END();
5819 return VINF_SUCCESS;
5820}
5821
5822
5823/** Opcode 0x0f 0xae mem/1. */
5824FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5825{
5826 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5827 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5828 return IEMOP_RAISE_INVALID_OPCODE();
5829
5830 IEM_MC_BEGIN(3, 1);
5831 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5832 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5833 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5836 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5837 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5838 IEM_MC_END();
5839 return VINF_SUCCESS;
5840}
5841
5842
/** Opcode 0x0f 0xae mem/2 - ldmxcsr. Not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3 - stmxcsr. Not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4 - xsave. Raises \#UD for now. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5 - xrstor. Raises \#UD for now. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6 - xsaveopt. Raises \#UD for now. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7 - clflush. Not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5860
5861
5862/** Opcode 0x0f 0xae 11b/5. */
5863FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5864{
5865 RT_NOREF_PV(bRm);
5866 IEMOP_MNEMONIC(lfence, "lfence");
5867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5868 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5869 return IEMOP_RAISE_INVALID_OPCODE();
5870
5871 IEM_MC_BEGIN(0, 0);
5872 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5873 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5874 else
5875 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5876 IEM_MC_ADVANCE_RIP();
5877 IEM_MC_END();
5878 return VINF_SUCCESS;
5879}
5880
5881
5882/** Opcode 0x0f 0xae 11b/6. */
5883FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5884{
5885 RT_NOREF_PV(bRm);
5886 IEMOP_MNEMONIC(mfence, "mfence");
5887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5888 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5889 return IEMOP_RAISE_INVALID_OPCODE();
5890
5891 IEM_MC_BEGIN(0, 0);
5892 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5893 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5894 else
5895 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5896 IEM_MC_ADVANCE_RIP();
5897 IEM_MC_END();
5898 return VINF_SUCCESS;
5899}
5900
5901
5902/** Opcode 0x0f 0xae 11b/7. */
5903FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5904{
5905 RT_NOREF_PV(bRm);
5906 IEMOP_MNEMONIC(sfence, "sfence");
5907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5908 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5909 return IEMOP_RAISE_INVALID_OPCODE();
5910
5911 IEM_MC_BEGIN(0, 0);
5912 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5913 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5914 else
5915 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5916 IEM_MC_ADVANCE_RIP();
5917 IEM_MC_END();
5918 return VINF_SUCCESS;
5919}
5920
5921
/** Opcode 0xf3 0x0f 0xae 11b/0 - rdfsbase. Raises \#UD for now. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1 - rdgsbase. Raises \#UD for now. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2 - wrfsbase. Raises \#UD for now. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3 - wrgsbase. Raises \#UD for now. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5933
5934
/** Opcode 0x0f 0xae.
 *
 * Group 15 dispatcher.  Memory forms (mod != 3) select FXSAVE/FXRSTOR/
 * LDMXCSR/STMXCSR/XSAVE/XRSTOR/XSAVEOPT/CLFLUSH by the reg field; register
 * forms (mod == 3) are further distinguished by legacy prefixes: no prefix
 * gives the fence instructions, F3 gives the FS/GS-base instructions.
 */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory forms, dispatched on the reg field.
           NOTE(review): prefixes are not checked here for the memory forms --
           presumably intentional (each worker decodes further), but confirm. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register forms, dispatched on the repeat/size/lock prefixes first. */
        switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* No prefix: memory fences (reg 0..4 are invalid encodings). */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            case IEM_OP_PRF_REPZ: /* F3 prefix: FS/GS base access (reg 4..7 invalid). */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            default: /* Any other prefix combination is invalid. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5994
5995
/** Opcode 0x0f 0xaf.
 *
 * IMUL Gv,Ev - two-operand signed multiply via the common rv,rm binary
 * operator helper.  SF/ZF/AF/PF are undefined after IMUL, hence excluded
 * from verification.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386(); /* Two-operand IMUL first appeared on the 386. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
6004
6005
/** Opcode 0x0f 0xb0.
 *
 * CMPXCHG Eb,Gb - compare AL with r/m8; the assembly worker updates the
 * destination / AL and EFLAGS according to the outcome.  Honours LOCK for
 * both register and memory forms by selecting the locked worker variant.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486(); /* CMPXCHG first appeared on the 486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination form: operate directly on register references. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination form: map the destination read-write, keep a
           local AL copy referenced for the worker, and write AL back after
           committing the memory update and EFLAGS. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6064
/** Opcode 0x0f 0xb1.
 *
 * CMPXCHG Ev,Gv - compare rAX with r/m16/32/64; the assembly worker updates
 * the destination / accumulator and EFLAGS according to the outcome.
 * Honours LOCK via the locked worker variants.  On 32-bit (x86) hosts the
 * 64-bit source is passed by reference instead of by value -- presumably a
 * calling-convention limitation of the 32-bit assembly worker; confirm
 * against iemAImpl_cmpxchg_u64's declaration.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486(); /* CMPXCHG first appeared on the 486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination form, one case per effective operand size. */
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit register writes in long mode clear the upper halves
                   of both registers touched via references. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2); /* by-reference source on 32-bit hosts */
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination form: map the destination read-write, keep a
           local rAX copy referenced for the worker, then commit memory,
           EFLAGS and the accumulator in that order. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2); /* by-reference source on 32-bit hosts */
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6258
6259
/**
 * Common worker for LSS/LFS/LGS (0x0f 0xb2/0xb4/0xb5): loads a far pointer
 * (offset followed by a 16-bit selector) from memory into the given segment
 * register and general register via the iemCImpl_load_SReg_Greg C worker.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 * @param   bRm         The mod r/m byte; must denote a memory operand.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* 16-bit offset at +0, selector at +2. */
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            /* 32-bit offset at +0, selector at +4. */
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* 64-bit offset at +0, selector at +8.  AMD CPUs may only load a
               32-bit offset here (sign-extended) -- see the todo below. */
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6321
6322
/** Opcode 0x0f 0xb2.
 *
 * LSS Gv,Mp - the operand must be memory (Mp); the register form raises \#UD.
 */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386(); /* LSS first appeared on the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
6333
6334
/** Opcode 0x0f 0xb3.
 *
 * BTR Ev,Gv - bit test and reset, via the common Ev,Gv bit-op worker.
 */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386(); /* BTR first appeared on the 386. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
6342
6343
/** Opcode 0x0f 0xb4.
 *
 * LFS Gv,Mp - the operand must be memory (Mp); the register form raises \#UD.
 */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386(); /* LFS first appeared on the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
6354
6355
/** Opcode 0x0f 0xb5.
 *
 * LGS Gv,Mp - the operand must be memory (Mp); the register form raises \#UD.
 */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386(); /* LGS first appeared on the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
6366
6367
/** Opcode 0x0f 0xb6.
 *
 * MOVZX Gv,Eb - zero-extend an 8-bit register or memory operand into a
 * 16/32/64-bit general register, one case per effective operand size.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386(); /* MOVZX first appeared on the 386. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6461
6462
/** Opcode 0x0f 0xb7.
 *
 * MOVZX Gv,Ew - zero-extend a 16-bit register or memory operand into a
 * 32/64-bit general register (16-bit and 32-bit operand sizes both yield
 * a 32-bit destination here).
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386(); /* MOVZX first appeared on the 386. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6531
6532
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF).  Raises \#UD. */
FNIEMOP_UD_STUB(iemOp_jmpe);
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev.  Stub, not yet implemented. */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6537
6538
/** Opcode 0x0f 0xb9.
 *
 * Group 10 (UD1) - deliberately undefined; always raises \#UD.  The Log call
 * helps spot guests that execute it.
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
6545
6546
/** Opcode 0x0f 0xba.
 *
 * Group 8: BT/BTS/BTR/BTC Ev,Ib, selected by the reg field (0..3 are
 * invalid encodings).  The immediate bit offset is masked to the operand
 * width.  OF/SF/ZF/AF/PF are undefined after these instructions.
 */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386(); /* These bit instructions first appeared on the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1); /* offset mod 16 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1); /* offset mod 32 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes clear the upper half in long mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1); /* offset mod 64 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads; the others modify the destination and may be locked. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* One trailing opcode byte (the immediate) follows the addressing bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
6718
6719
/** Opcode 0x0f 0xbb.
 *
 * BTC Ev,Gv - bit test and complement, via the common Ev,Gv bit-op worker.
 */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386(); /* BTC first appeared on the 386. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
6727
6728
/** Opcode 0x0f 0xbc.
 *
 * BSF Gv,Ev - bit scan forward via the common rv,rm binary operator helper.
 * Only ZF is defined afterwards; the other listed flags are undefined.
 */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386(); /* BSF first appeared on the 386. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
6737
6738
/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev.  Stub, not yet implemented. */
FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6741
6742
/** Opcode 0x0f 0xbd.
 *
 * BSR Gv,Ev - bit scan reverse via the common rv,rm binary operator helper.
 * Only ZF is defined afterwards; the other listed flags are undefined.
 */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386(); /* BSR first appeared on the 386. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
6751
6752
/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev.  Stub, not yet implemented. */
FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6755
6756
/** Opcode 0x0f 0xbe.
 *
 * MOVSX Gv,Eb - sign-extend an 8-bit register or memory operand into a
 * 16/32/64-bit general register, one case per effective operand size.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386(); /* MOVSX first appeared on the 386. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6850
6851
/** Opcode 0x0f 0xbf. */
/**
 * MOVSX Gv,Ew - sign-extend a 16-bit register or memory operand into a
 * 32/64-bit general purpose register (386+).  Note that a 16-bit effective
 * operand size is handled the same as 32-bit here (see todo below).
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            /* 16/32-bit operand size: sign-extend into a 32-bit destination. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6920
6921
/** Opcode 0x0f 0xc0. */
/**
 * XADD Eb,Gb - exchange and add, byte form (486+).  The destination receives
 * the sum and the source register receives the old destination value; the
 * LOCK prefix selects the atomic memory variant.
 */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *,  pu8Dst,          0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,          1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t,  u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        /* The worker operates on a local copy of the register; the original
           register is updated from that copy after the memory commit. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
6980
6981
/** Opcode 0x0f 0xc1. */
/**
 * XADD Ev,Gv - exchange and add for 16/32/64-bit operands (486+).  Mirrors
 * iemOp_xadd_Eb_Gb with one case per effective operand size; LOCK selects
 * the atomic worker for the memory forms.
 */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t *, pu16Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t *, pu32Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit writes zero the upper halves of both 64-bit registers. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t *, pu64Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                /* Worker updates a local register copy; it is written back after
                   the memory operand has been committed. */
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst,         0);
                IEM_MC_ARG(uint16_t *, pu16Reg,         1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst,         0);
                IEM_MC_ARG(uint32_t *, pu32Reg,         1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst,         0);
                IEM_MC_ARG(uint64_t *, pu64Reg,         1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7134
7135
/* 0x0f 0xc2 family - SSE/AVX compare instructions, not yet implemented
   (FNIEMOP_STUB presumably emits a "not implemented" decoder stub - confirm
   against the macro definition). */
/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7144
7145
/** Opcode 0x0f 0xc3. */
/**
 * MOVNTI My,Gy - non-temporal store of a 32/64-bit register to memory.
 * Requires SSE2 (#UD otherwise); only the register->memory form is valid,
 * so mod=3 raises #UD.  The non-temporal hint itself is not modelled here -
 * a plain store is performed.
 */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* The SSE2 check is done after decoding so the #UD takes
                   priority over address-size related faults. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form.   */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0x66 0x0f 0xc3 - invalid */
/* Opcode 0xf3 0x0f 0xc3 - invalid */
/* Opcode 0xf2 0x0f 0xc3 - invalid */

/* 0x0f 0xc4 - 0x0f 0xc6: MMX/SSE insert/extract/shuffle, not yet implemented. */
/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
/* Opcode 0xf3 0x0f 0xc4 - invalid */
/* Opcode 0xf2 0x0f 0xc4 - invalid */

/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
/* Opcode 0xf3 0x0f 0xc5 - invalid */
/* Opcode 0xf2 0x0f 0xc5 - invalid */

/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
/* Opcode 0xf3 0x0f 0xc6 - invalid */
/* Opcode 0xf2 0x0f 0xc6 - invalid */
7223
7224
/** Opcode 0x0f 0xc7 !11/1. */
/**
 * CMPXCHG8B Mq - compare EDX:EAX with the 64-bit memory operand; on match
 * store ECX:EBX, otherwise load the memory value into EDX:EAX.  ZF reports
 * the outcome.  The LOCK prefix selects the atomic worker.
 *
 * @param   bRm     The ModR/M byte (mod != 3, reg == 1, checked by caller).
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst,     0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx,     1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx,     2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Assemble the EDX:EAX and ECX:EBX pairs in local 64-bit unions. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* Only write EAX/EDX back on mismatch (ZF clear), as the worker left
       the loaded memory value in the local pair. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7269
7270
/** Opcode REX.W 0x0f 0xc7 !11/1. */
/**
 * CMPXCHG16B Mdq - 128-bit compare and exchange of RDX:RAX / RCX:RBX with a
 * 16-byte aligned memory operand (#GP(0) on misalignment, #UD without the
 * CX16 CPUID feature).  Uses the host cmpxchg16b when available; otherwise
 * falls back to a non-atomic worker (uni-CPU) or a rendezvous (SMP).
 *
 * @param   bRm     The ModR/M byte (mod != 3, reg == 1, checked by caller).
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
#if 0
        RT_NOREF(bRm);
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#else
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst,     0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx,     1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx,     2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        /* Assemble the RDX:RAX and RCX:RBX pairs in local 128-bit unions. */
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);
# ifdef RT_ARCH_AMD64
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
        else
# endif
        {
            /* Note! The fallback for 32-bit systems and systems without CX16 does
               multiple accesses and is not atomic, which works fine in a uni-CPU
               guest configuration (ignoring DMA).  If guest SMP is active we have
               no choice but to use a rendezvous callback here.  Sigh. */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
            {
                IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
            }
        }

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* Only write RAX/RDX back on mismatch (ZF clear). */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();

        IEM_MC_END();
        return VINF_SUCCESS;
#endif
    }
    Log(("cmpxchg16b -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
7344
7345
/* Group 9 /6 and /7 members: RDRAND and the VMX pointer instructions.
   FNIEMOP_UD_STUB_1 presumably declares a stub that raises #UD - confirm
   against the macro definition. */
/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7360
7361
/** Opcode 0x0f 0xc7. */
/**
 * Group 9 dispatcher - routes on the ModR/M reg field:
 * /1 cmpxchg8b/16b (memory only, REX.W selects 16b), /6 rdrand or the
 * vmptrld/vmclear/vmxon family (prefix selected), /7 vmptrst.  All other
 * reg values raise #UD.
 */
FNIEMOP_DEF(iemOp_Grp9)
{
    /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 2: case 3: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 1:
            /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
            /* cmpxchg8b/16b: register form and 0x66/0xf3 prefixes are #UD. */
            if (   (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
                || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
                return IEMOP_RAISE_INVALID_OPCODE();
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
            return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
        case 6:
            /* Register form is rdrand; memory form selects on prefixes. */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
            {
                case 0:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
                case IEM_OP_PRF_SIZE_OP:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
                case IEM_OP_PRF_REPZ:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
                default:
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
            {
                case 0:
                case IEM_OP_PRF_REPZ:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
                default:
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7405
7406
/**
 * Common 'bswap register' helper.
 *
 * Byte-swaps the general purpose register iReg according to the effective
 * operand size.  Note that BSWAP with a 16-bit operand size is undefined
 * per the manuals; here the 16-bit worker operates on a 32-bit register
 * reference without clearing the high dword.
 *
 * @param   iReg    The register index (including any REX.B extension).
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);     /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7446
7447
/** Opcode 0x0f 0xc8. */
/** BSWAP rAX/r8 - thin wrapper around the common bswap helper. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xc9. */
/** BSWAP rCX/r9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
7467
7468
7469/** Opcode 0x0f 0xca. */
7470FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7471{
7472 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r9");
7473 IEMOP_HLP_MIN_486();
7474 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7475}
7476
7477
7478/** Opcode 0x0f 0xcb. */
7479FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7480{
7481 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r9");
7482 IEMOP_HLP_MIN_486();
7483 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7484}
7485
7486
/** Opcode 0x0f 0xcc. */
/** BSWAP rSP/r12. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcd. */
/** BSWAP rBP/r13. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xce. */
/** BSWAP rSI/r14. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcf. */
/** BSWAP rDI/r15. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
7520}
7521
7522
/* 0x0f 0xd0 - 0x0f 0xd6: MMX/SSE shifts, adds, multiplies and moves, not yet
   implemented (FNIEMOP_STUB presumably emits a "not implemented" stub). */
/* Opcode 0x0f 0xd0 - invalid */
/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0xd0 - invalid */
/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);

/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xd1 - invalid */
/* Opcode 0xf2 0x0f 0xd1 - invalid */

/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd2 - invalid */
/* Opcode 0xf2 0x0f 0xd2 - invalid */

/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd3 - invalid */
/* Opcode 0xf2 0x0f 0xd3 - invalid */

/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xd4 - invalid */
/* Opcode 0xf2 0x0f 0xd4 - invalid */

/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd5 - invalid */
/* Opcode 0xf2 0x0f 0xd5 - invalid */

/* Opcode 0x0f 0xd6 - invalid */
/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
#if 0
/* Disabled draft for the 0xd6 handlers; the body is currently a copy of the
   0xd7 pmovmskb handler used as a template.  Fixed the mangled
   'I E M O P _ M N E M O N I C' token on the MMX branch so the block would
   at least compile if ever enabled. */
FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
{
    /* Docs says register only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
            IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *,           pDst, 0);
            IEM_MC_ARG(uint128_t const *,    pSrc, 1);
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
            IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *,          pDst, 0);
            IEM_MC_ARG(uint64_t const *,    pSrc, 1);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
            IEM_MC_PREPARE_FPU_USAGE();
            IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
            IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
#endif
7615
7616
7617/** Opcode 0x0f 0xd7. */
7618FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7619{
7620 /* Docs says register only. */
7621 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7622 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7623 return IEMOP_RAISE_INVALID_OPCODE();
7624
7625 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
7626 /** @todo testcase: Check that the instruction implicitly clears the high
7627 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7628 * and opcode modifications are made to work with the whole width (not
7629 * just 128). */
7630 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7631 {
7632 case IEM_OP_PRF_SIZE_OP: /* SSE */
7633 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7634 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7635 IEM_MC_BEGIN(2, 0);
7636 IEM_MC_ARG(uint64_t *, pDst, 0);
7637 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7638 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7639 IEM_MC_PREPARE_SSE_USAGE();
7640 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7641 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7642 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7643 IEM_MC_ADVANCE_RIP();
7644 IEM_MC_END();
7645 return VINF_SUCCESS;
7646
7647 case 0: /* MMX */
7648 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7649 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7650 IEM_MC_BEGIN(2, 0);
7651 IEM_MC_ARG(uint64_t *, pDst, 0);
7652 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7653 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7654 IEM_MC_PREPARE_FPU_USAGE();
7655 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7656 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7657 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7658 IEM_MC_ADVANCE_RIP();
7659 IEM_MC_END();
7660 return VINF_SUCCESS;
7661
7662 default:
7663 return IEMOP_RAISE_INVALID_OPCODE();
7664 }
7665}
7666
7667
/* 0x0f 0xd8 - 0x0f 0xe2: MMX/SSE saturating arithmetic, min/max, logical
   ops, averages and arithmetic shifts, not yet implemented. */
/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_STUB(iemOp_pand_Pq_Qq);
/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */

/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7740/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7741FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7742/* Opcode 0xf3 0x0f 0xe2 - invalid */
7743/* Opcode 0xf2 0x0f 0xe2 - invalid */
7744
7745/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7746FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7747/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7748FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7749/* Opcode 0xf3 0x0f 0xe3 - invalid */
7750/* Opcode 0xf2 0x0f 0xe3 - invalid */
7751
7752/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7753FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7754/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7755FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7756/* Opcode 0xf3 0x0f 0xe4 - invalid */
7757/* Opcode 0xf2 0x0f 0xe4 - invalid */
7758
7759/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7760FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7761/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7762FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7763/* Opcode 0xf3 0x0f 0xe5 - invalid */
7764/* Opcode 0xf2 0x0f 0xe5 - invalid */
7765
7766/* Opcode 0x0f 0xe6 - invalid */
7767/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7768FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7769/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7770FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7771/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7772FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7773
7774
7775/** Opcode 0x0f 0xe7. */
7776FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7777{
7778 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7779 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7780 {
7781 /*
7782 * Register, memory.
7783 */
7784/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
7785 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7786 {
7787
7788 case IEM_OP_PRF_SIZE_OP: /* SSE */
7789 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7790 IEM_MC_BEGIN(0, 2);
7791 IEM_MC_LOCAL(uint128_t, uSrc);
7792 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7793
7794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7796 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7797 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7798
7799 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7800 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7801
7802 IEM_MC_ADVANCE_RIP();
7803 IEM_MC_END();
7804 break;
7805
7806 case 0: /* MMX */
7807 IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7808 IEM_MC_BEGIN(0, 2);
7809 IEM_MC_LOCAL(uint64_t, uSrc);
7810 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7811
7812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7814 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7815 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7816
7817 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7818 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7819
7820 IEM_MC_ADVANCE_RIP();
7821 IEM_MC_END();
7822 break;
7823
7824 default:
7825 return IEMOP_RAISE_INVALID_OPCODE();
7826 }
7827 }
7828 /* The register, register encoding is invalid. */
7829 else
7830 return IEMOP_RAISE_INVALID_OPCODE();
7831 return VINF_SUCCESS;
7832}
7833
7834
/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */

/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */

/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_STUB(iemOp_por_Pq_Qq);
/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */

/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */
7883
7884
/** Opcode 0x0f 0xef - pxor Pq,Qq (MMX); 0x66 0x0f 0xef - pxor Vdq,Wdq (SSE2).
 * Shared decoder: the common MMX/SSE2 full-register worker picks the form
 * based on the operand-size prefix. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    IEMOP_MNEMONIC(pxor, "pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xfe - invalid */

7983
/** Opcode 0x0f 0xff - UD0.
 * Always raises \#UD; on Intel CPUs a ModR/M byte (and its effective address,
 * if any) is decoded first so decode-time faults can be delivered before the
 * \#UD is raised. */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        /* Resolve the effective address so any address-calculation side
           effects of the decode happen, but the value itself is unused. */
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
8001
8002
8003
/** Repeats a_fn four times. For decoding tables. */
#define IEMOP_X4(a_fn) a_fn, a_fn, a_fn, a_fn

/**
 * Two-byte (0x0f) opcode dispatch table.
 *
 * Four entries per opcode, one for each mandatory-prefix column:
 * no prefix, 0x66 (operand size), 0xf3 (repz) and 0xf2 (repnz).
 * Size is asserted below to be 256 * 4 = 1024 entries.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*          no prefix,                  066h prefix,                f3h prefix,                 f2h prefix */
    /* 0x00 */  IEMOP_X4(iemOp_Grp6),
    /* 0x01 */  IEMOP_X4(iemOp_Grp7),
    /* 0x02 */  IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */  IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */  IEMOP_X4(iemOp_Invalid),
    /* 0x05 */  IEMOP_X4(iemOp_syscall),
    /* 0x06 */  IEMOP_X4(iemOp_clts),
    /* 0x07 */  IEMOP_X4(iemOp_sysret),
    /* 0x08 */  IEMOP_X4(iemOp_invd),
    /* 0x09 */  IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */  IEMOP_X4(iemOp_Invalid),
    /* 0x0b */  IEMOP_X4(iemOp_ud2),
    /* 0x0c */  IEMOP_X4(iemOp_Invalid),
    /* 0x0d */  IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */  IEMOP_X4(iemOp_femms),
    /* 0x0f */  IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */  iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
    /* 0x11 */  iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
    /* 0x12 */  iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
    /* 0x13 */  iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */  iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */  iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */  iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */  iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */  IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */  IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */  iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */  iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */  iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */  iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */  iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */  iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */  iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */  iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */  iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */  iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM,
    /* 0x2a */  iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
    /* 0x2b */  iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */  iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
    /* 0x2d */  iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
    /* 0x2e */  iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */  iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */  IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */  IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */  IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */  IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */  IEMOP_X4(iemOp_sysenter),
    /* 0x35 */  IEMOP_X4(iemOp_sysexit),
    /* 0x36 */  IEMOP_X4(iemOp_Invalid),
    /* 0x37 */  IEMOP_X4(iemOp_getsec),
    /* 0x38 */  IEMOP_X4(iemOp_3byte_Esc_A4),
    /* 0x39 */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */  IEMOP_X4(iemOp_3byte_Esc_A5),
    /* 0x3b */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */  IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */  IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */  IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */  IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */  IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */  IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */  IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */  IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */  IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */  IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */  IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */  IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */  IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */  IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */  IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */  IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */  iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */  iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
    /* 0x52 */  iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */  iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */  iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */  iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */  iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */  iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */  iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
    /* 0x59 */  iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
    /* 0x5a */  iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
    /* 0x5b */  iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */  iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
    /* 0x5d */  iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
    /* 0x5e */  iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
    /* 0x5f */  iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,

    /* 0x60 */  iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */  iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */  iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */  iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */  iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */  iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */  iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */  iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */  iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */  iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */  iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */  iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */  iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */  iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */  iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */  IEMOP_X4(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq),

    /* 0x70 */  IEMOP_X4(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib),
    /* 0x71 */  IEMOP_X4(iemOp_Grp12),
    /* 0x72 */  IEMOP_X4(iemOp_Grp13),
    /* 0x73 */  IEMOP_X4(iemOp_Grp14),
    /* 0x74 */  iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */  iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */  iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */  iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */  iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */  iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */  iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */  iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */  iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */  iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */  IEMOP_X4(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq),
    /* 0x7f */  IEMOP_X4(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq),

    /* 0x80 */  IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */  IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */  IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */  IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */  IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */  IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */  IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */  IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */  IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */  IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */  IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */  IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */  IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */  IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */  IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */  IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */  IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */  IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */  IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */  IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */  IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */  IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */  IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */  IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */  IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */  IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */  IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */  IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */  IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */  IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */  IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */  IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */  IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */  IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */  IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */  IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */  IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */  IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */  IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */  IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */  IEMOP_X4(iemOp_rsm),
    /* 0xab */  IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */  IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */  IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */  IEMOP_X4(iemOp_Grp15),
    /* 0xaf */  IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */  IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */  IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */  IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */  IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */  IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */  IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */  IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */  IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */  iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */  IEMOP_X4(iemOp_Grp10),
    /* 0xba */  IEMOP_X4(iemOp_Grp8),
    /* 0xbb */  IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */  iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */  iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */  IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */  IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */  IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */  IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */  iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */  iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */  iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */  iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */  iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */  IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */  IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */  IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */  IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */  IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */  IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */  IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */  IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */  IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */  iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */  iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */  iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */  iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */  iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */  iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */  iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */  IEMOP_X4(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq),
    /* 0xd8 */  iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */  iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */  iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */  iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */  iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */  iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */  iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */  iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */  iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */  iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */  iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */  iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */  iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */  iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */  iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */  iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */  iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */  iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */  iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */  iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */  iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */  iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */  iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */  iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */  iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */  iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */  iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */  iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */  iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */  iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */  iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */  iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */  iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */  iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */  iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */  iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */  iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */  iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */  iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */  IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
8284/** @} */
8285
8286
8287/** @name One byte opcodes.
8288 *
8289 * @{
8290 */
8291
/** Opcode 0x00 - add Eb,Gb (byte register/memory destination). */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
8298
8299
/** Opcode 0x01 - add Ev,Gv (word/dword/qword register/memory destination). */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
8306
8307
/** Opcode 0x02 - add Gb,Eb (byte register destination). */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
8314
8315
/** Opcode 0x03 - add Gv,Ev (word/dword/qword register destination). */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
8322
8323
/** Opcode 0x04 - add al,Ib (immediate to AL). */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
8330
8331
/** Opcode 0x05 - add rAX,Iz (immediate to accumulator, operand-size dependent). */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
8338
8339
/** Opcode 0x06 - push es. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC(push_es, "push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
8346
8347
/** Opcode 0x07 - pop es.
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT); deferred to a C implementation
 * since segment-register loads can fault. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC(pop_es, "pop es");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
8356
8357
/** Opcode 0x08. OR Eb,Gb - byte bitwise or, r/m destination.  AF is
 *  architecturally undefined for OR, hence the verification hint. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}


/** Opcode 0x09. OR Ev,Gv - word/dword/qword bitwise or, r/m destination. */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}


/** Opcode 0x0a. OR Gb,Eb - byte bitwise or, register destination. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b. OR Gv,Ev - word/dword/qword bitwise or, register destination. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c. OR AL,Ib - or immediate byte into AL. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d. OR rAX,Iz - or immediate word/dword into rAX. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
8410
8411
/** Opcode 0x0e. PUSH CS - push the CS segment selector (pre-286 only;
 *  0x0f is the two-byte escape on 286+, see iemOp_2byteEscape). */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC(push_cs, "push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
8418
8419
/** Opcode 0x0f. Escape into the two-byte opcode map. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#ifdef VBOX_STRICT
    /* One-time sanity check (strict builds only) that the 4-entries-per-opcode
       layout of g_apfnTwoByteMap lines up as expected, using 0x0f 0xbc
       (bsf / f3-prefixed tzcnt) as the probe. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);

    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    /* Four map entries per opcode byte, selected by the mandatory-prefix index. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
8442
/** Opcode 0x10. ADC Eb,Gb - byte add-with-carry, r/m destination. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11. ADC Ev,Gv - word/dword/qword add-with-carry, r/m destination. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12. ADC Gb,Eb - byte add-with-carry, register destination. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13. ADC Gv,Ev - word/dword/qword add-with-carry, register destination. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14. ADC AL,Ib - add immediate byte plus carry to AL. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15. ADC rAX,Iz - add immediate word/dword plus carry to rAX. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
8489
8490
/** Opcode 0x16. PUSH SS - push the SS segment selector. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC(push_ss, "push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17. POP SS - invalid in 64-bit mode, deferred to C impl. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
8507
8508
/** Opcode 0x18. SBB Eb,Gb - byte subtract-with-borrow, r/m destination. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19. SBB Ev,Gv - word/dword/qword subtract-with-borrow, r/m destination. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a. SBB Gb,Eb - byte subtract-with-borrow, register destination. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b. SBB Gv,Ev - word/dword/qword subtract-with-borrow, register destination. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c. SBB AL,Ib - subtract immediate byte plus borrow from AL. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d. SBB rAX,Iz - subtract immediate word/dword plus borrow from rAX. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
8555
8556
/** Opcode 0x1e. PUSH DS - push the DS segment selector. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC(push_ds, "push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f. POP DS - invalid in 64-bit mode, deferred to C impl. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC(pop_ds, "pop ds");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
8573
8574
/** Opcode 0x20. AND Eb,Gb - byte bitwise and, r/m destination.  AF is
 *  architecturally undefined for AND, hence the verification hint. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21. AND Ev,Gv - word/dword/qword bitwise and, r/m destination. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22. AND Gb,Eb - byte bitwise and, register destination. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23. AND Gv,Ev - word/dword/qword bitwise and, register destination. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24. AND AL,Ib - and immediate byte into AL. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25. AND rAX,Iz - and immediate word/dword into rAX. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
8627
8628
/** Opcode 0x26. ES segment-override prefix: record the override and decode
 *  the next opcode byte via the one-byte map. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg   = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8639
8640
/** Opcode 0x27. DAA - decimal adjust AL after addition; invalid in 64-bit
 *  mode, OF is architecturally undefined.  Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC(daa_AL, "daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
8650
8651
/** Opcode 0x28. SUB Eb,Gb - byte subtract, r/m destination. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29. SUB Ev,Gv - word/dword/qword subtract, r/m destination. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a. SUB Gb,Eb - byte subtract, register destination. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b. SUB Gv,Ev - word/dword/qword subtract, register destination. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c. SUB AL,Ib - subtract immediate byte from AL. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d. SUB rAX,Iz - subtract immediate word/dword from rAX. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
8698
8699
/** Opcode 0x2e. CS segment-override prefix: record the override and decode
 *  the next opcode byte via the one-byte map. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg   = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8710
8711
/** Opcode 0x2f. DAS - decimal adjust AL after subtraction; invalid in 64-bit
 *  mode, OF is architecturally undefined.  Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC(das_AL, "das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
8721
8722
/** Opcode 0x30. XOR Eb,Gb - byte bitwise xor, r/m destination.  AF is
 *  architecturally undefined for XOR, hence the verification hint. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31. XOR Ev,Gv - word/dword/qword bitwise xor, r/m destination. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32. XOR Gb,Eb - byte bitwise xor, register destination. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33. XOR Gv,Ev - word/dword/qword bitwise xor, register destination. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34. XOR AL,Ib - xor immediate byte into AL. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35. XOR rAX,Iz - xor immediate word/dword into rAX. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
8775
8776
/** Opcode 0x36. SS segment-override prefix: record the override and decode
 *  the next opcode byte via the one-byte map. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg   = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8787
8788
/** Opcode 0x37. AAA - ASCII adjust AL after addition; not implemented yet. */
FNIEMOP_STUB(iemOp_aaa);
8791
8792
/** Opcode 0x38. CMP Eb,Gb - byte compare (flags only), r/m first operand. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39. CMP Ev,Gv - word/dword/qword compare, r/m first operand. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a. CMP Gb,Eb - byte compare, register first operand. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b. CMP Gv,Ev - word/dword/qword compare, register first operand. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c. CMP AL,Ib - compare AL with immediate byte. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d. CMP rAX,Iz - compare rAX with immediate word/dword. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
8839
8840
/** Opcode 0x3e. DS segment-override prefix: record the override and decode
 *  the next opcode byte via the one-byte map. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg   = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8851
8852
/** Opcode 0x3f. AAS - ASCII adjust AL after subtraction; not implemented yet. */
FNIEMOP_STUB(iemOp_aas);
8855
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Dispatches on the effective operand size and invokes the matching
 * size-specific worker from @a pImpl on the general register @a iReg,
 * updating the register and EFLAGS in place.
 *
 * @param   pImpl   The size-specific assembly workers for the operation.
 * @param   iReg    The general register index (X86_GREG_XXX).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit register writes zero the high dword in 64-bit mode. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reachable - all IEMMODE values are handled above; keeps compilers quiet. */
    return VINF_SUCCESS;
}
8900
8901
/** Opcode 0x40. INC eAX, or in 64-bit mode the plain REX prefix: record the
 *  prefix and decode the next opcode byte via the one-byte map. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41. INC eCX, or REX.B in 64-bit mode (extends ModRM r/m / base). */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42. INC eDX, or REX.X in 64-bit mode (extends SIB index). */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43. INC eBX, or REX.BX in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44. INC eSP, or REX.R in 64-bit mode (extends ModRM reg). */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45. INC eBP, or REX.RB in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46. INC eSI, or REX.RX in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47. INC eDI, or REX.RBX in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
9073
9074
/** Opcode 0x48. DEC eAX, or REX.W in 64-bit mode (64-bit operand size;
 *  requires recalculating the effective operand size). */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49. DEC eCX, or REX.BW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a. DEC eDX, or REX.XW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b. DEC eBX, or REX.BXW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}


/** Opcode 0x4c. DEC eSP, or REX.RW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d. DEC eBP, or REX.RBW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e. DEC eSI, or REX.RXW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f. DEC eDI, or REX.RBXW in 64-bit mode (all extension bits). */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
9253
9254
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the register index is extended with REX.B and the default
 * operand size is 64-bit (a 66h prefix selects 16-bit; 32-bit pushes are not
 * encodable).  The register value is then pushed at the effective size.
 *
 * @param   iReg    The general register index (X86_GREG_XXX) before REX.B.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
9300
9301
/** Opcode 0x50. PUSH rAX. */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/** Opcode 0x51. PUSH rCX. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/** Opcode 0x52. PUSH rDX. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/** Opcode 0x53. PUSH rBX. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
9332
9333
9334/** Opcode 0x54. */
9335FNIEMOP_DEF(iemOp_push_eSP)
9336{
9337 IEMOP_MNEMONIC(push_rSP, "push rSP");
9338 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
9339 {
9340 IEM_MC_BEGIN(0, 1);
9341 IEM_MC_LOCAL(uint16_t, u16Value);
9342 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
9343 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
9344 IEM_MC_PUSH_U16(u16Value);
9345 IEM_MC_ADVANCE_RIP();
9346 IEM_MC_END();
9347 }
9348 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
9349}
9350
9351
/** Opcode 0x55. PUSH rBP. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}


/** Opcode 0x56. PUSH rSI. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}


/** Opcode 0x57. PUSH rDI. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
9374
9375
/**
 * Common 'pop register' helper.
 *
 * In 64-bit mode the register index is extended with REX.B and the default
 * operand size is 64-bit (a 66h prefix selects 16-bit).  The stack value is
 * popped directly into the referenced register at the effective size.
 *
 * @param   iReg    The general register index (X86_GREG_XXX) before REX.B.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
9422
9423
/** Opcode 0x58. POP rAX. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}


/** Opcode 0x59. POP rCX. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}


/** Opcode 0x5a. POP rDX. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}


/** Opcode 0x5b. POP rBX. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
9454
9455
/** Opcode 0x5c. POP rSP.
 *
 * Needs special handling because the destination is the stack pointer itself:
 * the value is popped into a local first and then stored to SP, rather than
 * popping through a register reference as iemOpCommonPopGReg does.  With
 * REX.B in 64-bit mode the destination is r12 and the common path is used. */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
9503
9504
/** Opcode 0x5d. POP rBP. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}


/** Opcode 0x5e. POP rSI. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}


/** Opcode 0x5f. POP rDI. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
9527
9528
9529/** Opcode 0x60 - pusha: pushes all general registers; 186+ and invalid in
9530 *  64-bit mode.  Dispatches to the 16-bit or 32-bit C implementation based
9531 *  on the effective operand size. */
9530FNIEMOP_DEF(iemOp_pusha)
9531{
9532    IEMOP_MNEMONIC(pusha, "pusha");
9533    IEMOP_HLP_MIN_186();
9534    IEMOP_HLP_NO_64BIT();
9535    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
9536        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
9537    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
9538    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
9539}
9540
9541
9542/** Opcode 0x61 - popa: pops all general registers; 186+ and invalid in
9543 *  64-bit mode.  Dispatches to the 16-bit or 32-bit C implementation based
9544 *  on the effective operand size. */
9543FNIEMOP_DEF(iemOp_popa)
9544{
9545    IEMOP_MNEMONIC(popa, "popa");
9546    IEMOP_HLP_MIN_186();
9547    IEMOP_HLP_NO_64BIT();
9548    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
9549        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
9550    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
9551    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
9552}
9553
9554
9555/** Opcode 0x62 - bound Gv,Ma (and the EVEX prefix byte in 64-bit mode);
9556 *  currently a stub, not implemented. */
9556FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
9557// IEMOP_HLP_MIN_186(); - BOUND is a 186+ instruction; add this check when implementing.
9558
9559
9560/** Opcode 0x63 - arpl Ew,Gw in non-64-bit modes: adjusts the RPL field of
9561 *  the destination selector word; 286+, not valid in real or V86 mode. */
9561FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
9562{
9563    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
9564    IEMOP_HLP_MIN_286();
9565    IEMOP_HLP_NO_REAL_OR_V86_MODE();
9566    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9567
9568    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9569    {
9570        /* Register destination: reference the GPR directly and call the worker. */
9571        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
9572        IEM_MC_BEGIN(3, 0);
9573        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
9574        IEM_MC_ARG(uint16_t,        u16Src,     1);
9575        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
9576
9577        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9578        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
9579        IEM_MC_REF_EFLAGS(pEFlags);
9580        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
9581
9582        IEM_MC_ADVANCE_RIP();
9583        IEM_MC_END();
9584    }
9585    else
9586    {
9587        /* Memory destination: map for read-modify-write, then commit word and EFLAGS. */
9588        IEM_MC_BEGIN(3, 2);
9589        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
9590        IEM_MC_ARG(uint16_t,   u16Src,           1);
9591        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9592        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
9593
9594        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9595        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
9596        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9597        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9598        IEM_MC_FETCH_EFLAGS(EFlags);
9599        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
9600
9601        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9602        IEM_MC_COMMIT_EFLAGS(EFlags);
9603        IEM_MC_ADVANCE_RIP();
9604        IEM_MC_END();
9605    }
9606    return VINF_SUCCESS;
9607
9608}
9609
9610
9611/** Opcode 0x63 - movsxd Gv,Ev (64-bit mode): sign-extends a 32-bit source
9612 *  into a 64-bit destination register.
9613 * @note This is a weird one. It works like a regular move instruction if
9614 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
9615 * @todo This definitely needs a testcase to verify the odd cases. */
9615FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
9616{
9617    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
9618
9619    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
9620    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9621
9622    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9623    {
9624        /*
9625         * Register to register: fetch dword with sign-extension, store qword.
9626         */
9627        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9628        IEM_MC_BEGIN(0, 1);
9629        IEM_MC_LOCAL(uint64_t, u64Value);
9630        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9631        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9632        IEM_MC_ADVANCE_RIP();
9633        IEM_MC_END();
9634    }
9635    else
9636    {
9637        /*
9638         * We're loading a register from memory: dword load, sign-extended to 64 bits.
9639         */
9640        IEM_MC_BEGIN(0, 2);
9641        IEM_MC_LOCAL(uint64_t, u64Value);
9642        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9643        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9644        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9645        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9646        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9647        IEM_MC_ADVANCE_RIP();
9648        IEM_MC_END();
9649    }
9650    return VINF_SUCCESS;
9651}
9652
9653
9654/** Opcode 0x64 - FS segment override prefix (386+): records the prefix,
9655 *  sets FS as the effective segment and decodes the following opcode. */
9655FNIEMOP_DEF(iemOp_seg_FS)
9656{
9657    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
9658    IEMOP_HLP_MIN_386();
9659
9660    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
9661    pVCpu->iem.s.iEffSeg   = X86_SREG_FS;
9662
9663    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9664    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9665}
9666
9667
9668/** Opcode 0x65 - GS segment override prefix (386+): records the prefix,
9669 *  sets GS as the effective segment and decodes the following opcode. */
9669FNIEMOP_DEF(iemOp_seg_GS)
9670{
9671    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
9672    IEMOP_HLP_MIN_386();
9673
9674    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
9675    pVCpu->iem.s.iEffSeg   = X86_SREG_GS;
9676
9677    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9678    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9679}
9680
9681
9682/** Opcode 0x66 - operand-size override prefix (386+): records the prefix,
9683 *  recalculates the effective operand size and decodes the following opcode. */
9683FNIEMOP_DEF(iemOp_op_size)
9684{
9685    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
9686    IEMOP_HLP_MIN_386();
9687
9688    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
9689    iemRecalEffOpSize(pVCpu);
9690
9691    /* For the 4 entry opcode tables, the operand prefix does not count
9692       when REPZ or REPNZ are present. */
9693    if (pVCpu->iem.s.idxPrefix == 0)
9694        pVCpu->iem.s.idxPrefix = 1;
9695
9696    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9697    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9698}
9699
9700
9701/** Opcode 0x67 - address-size override prefix (386+): records the prefix,
9702 *  toggles the effective addressing mode relative to the default mode
9703 *  (16<->32 bit; 64-bit mode drops to 32-bit) and decodes the next opcode. */
9702FNIEMOP_DEF(iemOp_addr_size)
9703{
9704    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
9705    IEMOP_HLP_MIN_386();
9706
9707    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
9708    switch (pVCpu->iem.s.enmDefAddrMode)
9709    {
9710        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9711        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
9712        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9713        default: AssertFailed();
9714    }
9715
9716    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9717    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9718}
9719
9720
9721/** Opcode 0x68 - push Iz: pushes a full-size immediate; 186+.  In 64-bit
9722 *  mode a 32-bit immediate is sign-extended to 64 bits before the push. */
9722FNIEMOP_DEF(iemOp_push_Iz)
9723{
9724    IEMOP_MNEMONIC(push_Iz, "push Iz");
9725    IEMOP_HLP_MIN_186();
9726    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9727    switch (pVCpu->iem.s.enmEffOpSize)
9728    {
9729        case IEMMODE_16BIT:
9730        {
9731            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9732            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9733            IEM_MC_BEGIN(0,0);
9734            IEM_MC_PUSH_U16(u16Imm);
9735            IEM_MC_ADVANCE_RIP();
9736            IEM_MC_END();
9737            return VINF_SUCCESS;
9738        }
9739
9740        case IEMMODE_32BIT:
9741        {
9742            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9743            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9744            IEM_MC_BEGIN(0,0);
9745            IEM_MC_PUSH_U32(u32Imm);
9746            IEM_MC_ADVANCE_RIP();
9747            IEM_MC_END();
9748            return VINF_SUCCESS;
9749        }
9750
9751        case IEMMODE_64BIT:
9752        {
9753            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9754            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9755            IEM_MC_BEGIN(0,0);
9756            IEM_MC_PUSH_U64(u64Imm);
9757            IEM_MC_ADVANCE_RIP();
9758            IEM_MC_END();
9759            return VINF_SUCCESS;
9760        }
9761
9762        IEM_NOT_REACHED_DEFAULT_CASE_RET();
9763    }
9764}
9765
9766
9767/** Opcode 0x69 - imul Gv,Ev,Iz: three-operand signed multiply with a
9768 *  full-size immediate; 186+.  Each operand-size case handles register and
9769 *  memory source forms separately. */
9768FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
9769{
9770    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
9771    IEMOP_HLP_MIN_186();
9772    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9773    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9774
9775    switch (pVCpu->iem.s.enmEffOpSize)
9776    {
9777        case IEMMODE_16BIT:
9778        {
9779            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9780            {
9781                /* register operand */
9782                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9783                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9784
9785                IEM_MC_BEGIN(3, 1);
9786                IEM_MC_ARG(uint16_t *,      pu16Dst, 0);
9787                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
9788                IEM_MC_ARG(uint32_t *,      pEFlags, 2);
9789                IEM_MC_LOCAL(uint16_t,      u16Tmp);
9790
9791                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9792                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9793                IEM_MC_REF_EFLAGS(pEFlags);
9794                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9795                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9796
9797                IEM_MC_ADVANCE_RIP();
9798                IEM_MC_END();
9799            }
9800            else
9801            {
9802                /* memory operand */
9803                IEM_MC_BEGIN(3, 2);
9804                IEM_MC_ARG(uint16_t *,      pu16Dst, 0);
9805                IEM_MC_ARG(uint16_t,        u16Src, 1);
9806                IEM_MC_ARG(uint32_t *,      pEFlags, 2);
9807                IEM_MC_LOCAL(uint16_t,      u16Tmp);
9808                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
9809
9810                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9811                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9812                IEM_MC_ASSIGN(u16Src, u16Imm);
9813                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9814                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9815                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9816                IEM_MC_REF_EFLAGS(pEFlags);
9817                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9818                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9819
9820                IEM_MC_ADVANCE_RIP();
9821                IEM_MC_END();
9822            }
9823            return VINF_SUCCESS;
9824        }
9825
9826        case IEMMODE_32BIT:
9827        {
9828            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9829            {
9830                /* register operand */
9831                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9832                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9833
9834                IEM_MC_BEGIN(3, 1);
9835                IEM_MC_ARG(uint32_t *,      pu32Dst, 0);
9836                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
9837                IEM_MC_ARG(uint32_t *,      pEFlags, 2);
9838                IEM_MC_LOCAL(uint32_t,      u32Tmp);
9839
9840                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9841                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9842                IEM_MC_REF_EFLAGS(pEFlags);
9843                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9844                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9845
9846                IEM_MC_ADVANCE_RIP();
9847                IEM_MC_END();
9848            }
9849            else
9850            {
9851                /* memory operand */
9852                IEM_MC_BEGIN(3, 2);
9853                IEM_MC_ARG(uint32_t *,      pu32Dst, 0);
9854                IEM_MC_ARG(uint32_t,        u32Src, 1);
9855                IEM_MC_ARG(uint32_t *,      pEFlags, 2);
9856                IEM_MC_LOCAL(uint32_t,      u32Tmp);
9857                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
9858
9859                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9860                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9861                IEM_MC_ASSIGN(u32Src, u32Imm);
9862                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9863                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9864                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9865                IEM_MC_REF_EFLAGS(pEFlags);
9866                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9867                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9868
9869                IEM_MC_ADVANCE_RIP();
9870                IEM_MC_END();
9871            }
9872            return VINF_SUCCESS;
9873        }
9874
9875        case IEMMODE_64BIT:
9876        {
9877            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9878            {
9879                /* register operand */
9880                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9881                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9882
9883                IEM_MC_BEGIN(3, 1);
9884                IEM_MC_ARG(uint64_t *,      pu64Dst, 0);
9885                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
9886                IEM_MC_ARG(uint32_t *,      pEFlags, 2);
9887                IEM_MC_LOCAL(uint64_t,      u64Tmp);
9888
9889                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9890                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9891                IEM_MC_REF_EFLAGS(pEFlags);
9892                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9893                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9894
9895                IEM_MC_ADVANCE_RIP();
9896                IEM_MC_END();
9897            }
9898            else
9899            {
9900                /* memory operand */
9901                IEM_MC_BEGIN(3, 2);
9902                IEM_MC_ARG(uint64_t *,      pu64Dst, 0);
9903                IEM_MC_ARG(uint64_t,        u64Src, 1);
9904                IEM_MC_ARG(uint32_t *,      pEFlags, 2);
9905                IEM_MC_LOCAL(uint64_t,      u64Tmp);
9906                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
9907
9908                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9909                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9910                IEM_MC_ASSIGN(u64Src, u64Imm);
9911                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9912                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9913                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9914                IEM_MC_REF_EFLAGS(pEFlags);
9915                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9916                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9917
9918                IEM_MC_ADVANCE_RIP();
9919                IEM_MC_END();
9920            }
9921            return VINF_SUCCESS;
9922        }
9923    }
9924    AssertFailedReturn(VERR_IEM_IPE_9);
9925}
9926
9927
9928/** Opcode 0x6a - push Ib: pushes a sign-extended byte immediate at the
9929 *  effective operand size; 186+. */
9929FNIEMOP_DEF(iemOp_push_Ib)
9930{
9931    IEMOP_MNEMONIC(push_Ib, "push Ib");
9932    IEMOP_HLP_MIN_186();
9933    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9934    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9935    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9936
9937    IEM_MC_BEGIN(0,0);
9938    switch (pVCpu->iem.s.enmEffOpSize)
9939    {
9940        case IEMMODE_16BIT:
9941            IEM_MC_PUSH_U16(i8Imm);
9942            break;
9943        case IEMMODE_32BIT:
9944            IEM_MC_PUSH_U32(i8Imm);
9945            break;
9946        case IEMMODE_64BIT:
9947            IEM_MC_PUSH_U64(i8Imm);
9948            break;
9949    }
9950    IEM_MC_ADVANCE_RIP();
9951    IEM_MC_END();
9952    return VINF_SUCCESS;
9953}
9954
9955
9956/** Opcode 0x6b - imul Gv,Ev,Ib: three-operand signed multiply with a
9957 *  sign-extended byte immediate; 186+. */
9957FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
9958{
9959    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
9960    IEMOP_HLP_MIN_186();
9961    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9962    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9963
9964    switch (pVCpu->iem.s.enmEffOpSize)
9965    {
9966        case IEMMODE_16BIT:
9967            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9968            {
9969                /* register operand */
9970                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9971                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9972
9973                IEM_MC_BEGIN(3, 1);
9974                IEM_MC_ARG(uint16_t *,      pu16Dst, 0);
9975                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1);
9976                IEM_MC_ARG(uint32_t *,      pEFlags, 2);
9977                IEM_MC_LOCAL(uint16_t,      u16Tmp);
9978
9979                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9980                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9981                IEM_MC_REF_EFLAGS(pEFlags);
9982                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9983                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9984
9985                IEM_MC_ADVANCE_RIP();
9986                IEM_MC_END();
9987            }
9988            else
9989            {
9990                /* memory operand */
9991                IEM_MC_BEGIN(3, 2);
9992                IEM_MC_ARG(uint16_t *,      pu16Dst, 0);
9993                IEM_MC_ARG(uint16_t,        u16Src, 1);
9994                IEM_MC_ARG(uint32_t *,      pEFlags, 2);
9995                IEM_MC_LOCAL(uint16_t,      u16Tmp);
9996                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
9997
9998                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9999                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
10000                IEM_MC_ASSIGN(u16Src, u16Imm);
10001                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10002                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10003                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
10004                IEM_MC_REF_EFLAGS(pEFlags);
10005                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
10006                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
10007
10008                IEM_MC_ADVANCE_RIP();
10009                IEM_MC_END();
10010            }
10011            return VINF_SUCCESS;
10012
10013        case IEMMODE_32BIT:
10014            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10015            {
10016                /* register operand */
10017                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10018                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10019
10020                IEM_MC_BEGIN(3, 1);
10021                IEM_MC_ARG(uint32_t *,      pu32Dst, 0);
10022                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1);
10023                IEM_MC_ARG(uint32_t *,      pEFlags, 2);
10024                IEM_MC_LOCAL(uint32_t,      u32Tmp);
10025
10026                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10027                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
10028                IEM_MC_REF_EFLAGS(pEFlags);
10029                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
10030                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
10031
10032                IEM_MC_ADVANCE_RIP();
10033                IEM_MC_END();
10034            }
10035            else
10036            {
10037                /* memory operand */
10038                IEM_MC_BEGIN(3, 2);
10039                IEM_MC_ARG(uint32_t *,      pu32Dst, 0);
10040                IEM_MC_ARG(uint32_t,        u32Src, 1);
10041                IEM_MC_ARG(uint32_t *,      pEFlags, 2);
10042                IEM_MC_LOCAL(uint32_t,      u32Tmp);
10043                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
10044
10045                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10046                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
10047                IEM_MC_ASSIGN(u32Src, u32Imm);
10048                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10049                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10050                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
10051                IEM_MC_REF_EFLAGS(pEFlags);
10052                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
10053                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
10054
10055                IEM_MC_ADVANCE_RIP();
10056                IEM_MC_END();
10057            }
10058            return VINF_SUCCESS;
10059
10060        case IEMMODE_64BIT:
10061            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10062            {
10063                /* register operand */
10064                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10065                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10066
10067                IEM_MC_BEGIN(3, 1);
10068                IEM_MC_ARG(uint64_t *,      pu64Dst, 0);
10069                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1);
10070                IEM_MC_ARG(uint32_t *,      pEFlags, 2);
10071                IEM_MC_LOCAL(uint64_t,      u64Tmp);
10072
10073                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10074                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10075                IEM_MC_REF_EFLAGS(pEFlags);
10076                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10077                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10078
10079                IEM_MC_ADVANCE_RIP();
10080                IEM_MC_END();
10081            }
10082            else
10083            {
10084                /* memory operand */
10085                IEM_MC_BEGIN(3, 2);
10086                IEM_MC_ARG(uint64_t *,      pu64Dst, 0);
10087                IEM_MC_ARG(uint64_t,        u64Src, 1);
10088                IEM_MC_ARG(uint32_t *,      pEFlags, 2);
10089                IEM_MC_LOCAL(uint64_t,      u64Tmp);
10090                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
10091
10092                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10093                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
10094                IEM_MC_ASSIGN(u64Src, u64Imm);
10095                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10096                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10097                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10098                IEM_MC_REF_EFLAGS(pEFlags);
10099                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10100                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10101
10102                IEM_MC_ADVANCE_RIP();
10103                IEM_MC_END();
10104            }
10105            return VINF_SUCCESS;
10106    }
10107    AssertFailedReturn(VERR_IEM_IPE_8);
10108}
10109
10110
10111/** Opcode 0x6c - ins Yb,DX: input byte(s) from port DX to ES:[rDI]; 186+.
10112 *  Defers to the plain or REP-prefixed C implementation matching the
10113 *  effective addressing mode. */
10112FNIEMOP_DEF(iemOp_insb_Yb_DX)
10113{
10114    IEMOP_HLP_MIN_186();
10115    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10116    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10117    {
10118        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
10119        switch (pVCpu->iem.s.enmEffAddrMode)
10120        {
10121            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
10122            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
10123            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
10124            IEM_NOT_REACHED_DEFAULT_CASE_RET();
10125        }
10126    }
10127    else
10128    {
10129        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
10130        switch (pVCpu->iem.s.enmEffAddrMode)
10131        {
10132            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
10133            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
10134            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
10135            IEM_NOT_REACHED_DEFAULT_CASE_RET();
10136        }
10137    }
10138}
10139
10140
10141/** Opcode 0x6d - ins Yv,DX: input word/dword(s) from port DX; 186+.
10142 *  Dispatches on (REP prefix, effective operand size, effective address
10143 *  mode); the 64-bit operand size shares the 32-bit workers. */
10142FNIEMOP_DEF(iemOp_inswd_Yv_DX)
10143{
10144    IEMOP_HLP_MIN_186();
10145    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10146    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
10147    {
10148        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
10149        switch (pVCpu->iem.s.enmEffOpSize)
10150        {
10151            case IEMMODE_16BIT:
10152                switch (pVCpu->iem.s.enmEffAddrMode)
10153                {
10154                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
10155                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
10156                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
10157                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
10158                }
10159                break;
10160            case IEMMODE_64BIT:
10161            case IEMMODE_32BIT:
10162                switch (pVCpu->iem.s.enmEffAddrMode)
10163                {
10164                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
10165                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
10166                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
10167                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
10168                }
10169                break;
10170            IEM_NOT_REACHED_DEFAULT_CASE_RET();
10171        }
10172    }
10173    else
10174    {
10175        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
10176        switch (pVCpu->iem.s.enmEffOpSize)
10177        {
10178            case IEMMODE_16BIT:
10179                switch (pVCpu->iem.s.enmEffAddrMode)
10180                {
10181                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
10182                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
10183                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
10184                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
10185                }
10186                break;
10187            case IEMMODE_64BIT:
10188            case IEMMODE_32BIT:
10189                switch (pVCpu->iem.s.enmEffAddrMode)
10190                {
10191                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
10192                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
10193                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
10194                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
10195                }
10196                break;
10197            IEM_NOT_REACHED_DEFAULT_CASE_RET();
10198        }
10199    }
10200}
10201
10202
10203/** Opcode 0x6e - outs DX,Yb: output byte(s) from [iEffSeg:rSI] to port DX;
10204 *  186+.  Defers to the plain or REP-prefixed C implementation matching
10205 *  the effective addressing mode, passing the effective segment. */
10204FNIEMOP_DEF(iemOp_outsb_Yb_DX)
10205{
10206    IEMOP_HLP_MIN_186();
10207    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10208    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10209    {
10210        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
10211        switch (pVCpu->iem.s.enmEffAddrMode)
10212        {
10213            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
10214            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
10215            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
10216            IEM_NOT_REACHED_DEFAULT_CASE_RET();
10217        }
10218    }
10219    else
10220    {
10221        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
10222        switch (pVCpu->iem.s.enmEffAddrMode)
10223        {
10224            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
10225            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
10226            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
10227            IEM_NOT_REACHED_DEFAULT_CASE_RET();
10228        }
10229    }
10230}
10231
10232
10233/** Opcode 0x6f - outs DX,Yv: output word/dword(s) to port DX; 186+.
10234 *  Dispatches on (REP prefix, effective operand size, effective address
10235 *  mode); the 64-bit operand size shares the 32-bit workers. */
10234FNIEMOP_DEF(iemOp_outswd_Yv_DX)
10235{
10236    IEMOP_HLP_MIN_186();
10237    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10238    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
10239    {
10240        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
10241        switch (pVCpu->iem.s.enmEffOpSize)
10242        {
10243            case IEMMODE_16BIT:
10244                switch (pVCpu->iem.s.enmEffAddrMode)
10245                {
10246                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
10247                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
10248                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
10249                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
10250                }
10251                break;
10252            case IEMMODE_64BIT:
10253            case IEMMODE_32BIT:
10254                switch (pVCpu->iem.s.enmEffAddrMode)
10255                {
10256                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
10257                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
10258                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
10259                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
10260                }
10261                break;
10262            IEM_NOT_REACHED_DEFAULT_CASE_RET();
10263        }
10264    }
10265    else
10266    {
10267        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
10268        switch (pVCpu->iem.s.enmEffOpSize)
10269        {
10270            case IEMMODE_16BIT:
10271                switch (pVCpu->iem.s.enmEffAddrMode)
10272                {
10273                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
10274                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
10275                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
10276                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
10277                }
10278                break;
10279            case IEMMODE_64BIT:
10280            case IEMMODE_32BIT:
10281                switch (pVCpu->iem.s.enmEffAddrMode)
10282                {
10283                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
10284                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
10285                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
10286                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
10287                }
10288                break;
10289            IEM_NOT_REACHED_DEFAULT_CASE_RET();
10290        }
10291    }
10292}
10293
10294
10295/** Opcode 0x70 - jo Jb: 8-bit relative short jump taken when EFLAGS.OF is set. */
10296FNIEMOP_DEF(iemOp_jo_Jb)
10297{
10298    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
10299    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10300    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10301    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10302
10303    IEM_MC_BEGIN(0, 0);
10304    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
10305        IEM_MC_REL_JMP_S8(i8Imm);
10306    } IEM_MC_ELSE() {
10307        IEM_MC_ADVANCE_RIP();
10308    } IEM_MC_ENDIF();
10309    IEM_MC_END();
10310    return VINF_SUCCESS;
10311}
10312
10313
10314/** Opcode 0x71 - jno Jb: short jump taken when EFLAGS.OF is clear
10315 *  (test inverted: falls through when OF is set). */
10315FNIEMOP_DEF(iemOp_jno_Jb)
10316{
10317    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
10318    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10319    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10320    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10321
10322    IEM_MC_BEGIN(0, 0);
10323    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
10324        IEM_MC_ADVANCE_RIP();
10325    } IEM_MC_ELSE() {
10326        IEM_MC_REL_JMP_S8(i8Imm);
10327    } IEM_MC_ENDIF();
10328    IEM_MC_END();
10329    return VINF_SUCCESS;
10330}
10331
10332/** Opcode 0x72 - jc/jnae Jb: short jump taken when EFLAGS.CF is set. */
10333FNIEMOP_DEF(iemOp_jc_Jb)
10334{
10335    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
10336    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10337    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10338    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10339
10340    IEM_MC_BEGIN(0, 0);
10341    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10342        IEM_MC_REL_JMP_S8(i8Imm);
10343    } IEM_MC_ELSE() {
10344        IEM_MC_ADVANCE_RIP();
10345    } IEM_MC_ENDIF();
10346    IEM_MC_END();
10347    return VINF_SUCCESS;
10348}
10349
10350
10351/** Opcode 0x73 - jnc/jnb Jb: short jump taken when EFLAGS.CF is clear
10352 *  (test inverted: falls through when CF is set). */
10352FNIEMOP_DEF(iemOp_jnc_Jb)
10353{
10354    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
10355    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10356    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10357    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10358
10359    IEM_MC_BEGIN(0, 0);
10360    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10361        IEM_MC_ADVANCE_RIP();
10362    } IEM_MC_ELSE() {
10363        IEM_MC_REL_JMP_S8(i8Imm);
10364    } IEM_MC_ENDIF();
10365    IEM_MC_END();
10366    return VINF_SUCCESS;
10367}
10368
10369
10370/** Opcode 0x74 - je/jz Jb: short jump taken when EFLAGS.ZF is set. */
10371FNIEMOP_DEF(iemOp_je_Jb)
10372{
10373    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
10374    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10375    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10376    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10377
10378    IEM_MC_BEGIN(0, 0);
10379    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10380        IEM_MC_REL_JMP_S8(i8Imm);
10381    } IEM_MC_ELSE() {
10382        IEM_MC_ADVANCE_RIP();
10383    } IEM_MC_ENDIF();
10384    IEM_MC_END();
10385    return VINF_SUCCESS;
10386}
10387
10388
10389/** Opcode 0x75 - jne/jnz Jb: short jump taken when EFLAGS.ZF is clear
10390 *  (test inverted: falls through when ZF is set). */
10390FNIEMOP_DEF(iemOp_jne_Jb)
10391{
10392    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
10393    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10394    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10395    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10396
10397    IEM_MC_BEGIN(0, 0);
10398    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10399        IEM_MC_ADVANCE_RIP();
10400    } IEM_MC_ELSE() {
10401        IEM_MC_REL_JMP_S8(i8Imm);
10402    } IEM_MC_ENDIF();
10403    IEM_MC_END();
10404    return VINF_SUCCESS;
10405}
10406
10407
10408/** Opcode 0x76 - jbe/jna Jb: short jump taken when EFLAGS.CF or EFLAGS.ZF is set. */
10409FNIEMOP_DEF(iemOp_jbe_Jb)
10410{
10411    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
10412    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10413    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10414    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10415
10416    IEM_MC_BEGIN(0, 0);
10417    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10418        IEM_MC_REL_JMP_S8(i8Imm);
10419    } IEM_MC_ELSE() {
10420        IEM_MC_ADVANCE_RIP();
10421    } IEM_MC_ENDIF();
10422    IEM_MC_END();
10423    return VINF_SUCCESS;
10424}
10425
10426
10427/** Opcode 0x77 - ja/jnbe Jb: short jump taken when both EFLAGS.CF and
10428 *  EFLAGS.ZF are clear (test inverted: falls through when either is set). */
10428FNIEMOP_DEF(iemOp_jnbe_Jb)
10429{
10430    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
10431    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10432    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10433    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10434
10435    IEM_MC_BEGIN(0, 0);
10436    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10437        IEM_MC_ADVANCE_RIP();
10438    } IEM_MC_ELSE() {
10439        IEM_MC_REL_JMP_S8(i8Imm);
10440    } IEM_MC_ENDIF();
10441    IEM_MC_END();
10442    return VINF_SUCCESS;
10443}
10444
10445
10446/** Opcode 0x78 - js Jb: short jump taken when EFLAGS.SF is set. */
10447FNIEMOP_DEF(iemOp_js_Jb)
10448{
10449    IEMOP_MNEMONIC(js_Jb, "js Jb");
10450    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10451    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10452    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10453
10454    IEM_MC_BEGIN(0, 0);
10455    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
10456        IEM_MC_REL_JMP_S8(i8Imm);
10457    } IEM_MC_ELSE() {
10458        IEM_MC_ADVANCE_RIP();
10459    } IEM_MC_ENDIF();
10460    IEM_MC_END();
10461    return VINF_SUCCESS;
10462}
10463
10464
10465/** Opcode 0x79 - jns Jb: short jump taken when EFLAGS.SF is clear
10466 *  (test inverted: falls through when SF is set). */
10466FNIEMOP_DEF(iemOp_jns_Jb)
10467{
10468    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
10469    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10470    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10471    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10472
10473    IEM_MC_BEGIN(0, 0);
10474    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
10475        IEM_MC_ADVANCE_RIP();
10476    } IEM_MC_ELSE() {
10477        IEM_MC_REL_JMP_S8(i8Imm);
10478    } IEM_MC_ENDIF();
10479    IEM_MC_END();
10480    return VINF_SUCCESS;
10481}
10482
10483
10484/** Opcode 0x7a - jp/jpe Jb: short jump taken when EFLAGS.PF is set. */
10485FNIEMOP_DEF(iemOp_jp_Jb)
10486{
10487    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
10488    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10489    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10490    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10491
10492    IEM_MC_BEGIN(0, 0);
10493    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10494        IEM_MC_REL_JMP_S8(i8Imm);
10495    } IEM_MC_ELSE() {
10496        IEM_MC_ADVANCE_RIP();
10497    } IEM_MC_ENDIF();
10498    IEM_MC_END();
10499    return VINF_SUCCESS;
10500}
10501
10502
10503/** Opcode 0x7b - jnp/jpo Jb: short jump taken when EFLAGS.PF is clear
10504 *  (test inverted: falls through when PF is set). */
10504FNIEMOP_DEF(iemOp_jnp_Jb)
10505{
10506    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
10507    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10508    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10509    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10510
10511    IEM_MC_BEGIN(0, 0);
10512    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10513        IEM_MC_ADVANCE_RIP();
10514    } IEM_MC_ELSE() {
10515        IEM_MC_REL_JMP_S8(i8Imm);
10516    } IEM_MC_ENDIF();
10517    IEM_MC_END();
10518    return VINF_SUCCESS;
10519}
10520
10521
/**
 * Opcode 0x7c - jl/jnge Jb.
 *
 * Jump short if less (signed): taken when SF != OF.
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);      /* SF != OF: less, take the branch. */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10539
10540
/**
 * Opcode 0x7d - jnl/jge Jb.
 *
 * Jump short if not less (signed): taken when SF == OF.  Same test as jl
 * with the branch arms swapped.
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();          /* SF != OF: less, not taken. */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);      /* SF == OF: greater-or-equal, taken. */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10558
10559
/**
 * Opcode 0x7e - jle/jng Jb.
 *
 * Jump short if less or equal (signed): taken when ZF is set or SF != OF.
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);      /* ZF || (SF != OF): taken. */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10577
10578
/**
 * Opcode 0x7f - jnle/jg Jb.
 *
 * Jump short if greater (signed): taken when ZF is clear and SF == OF.
 * Same test as jle with the branch arms swapped.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();          /* ZF || (SF != OF): not taken. */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);      /* !ZF && SF == OF: greater, taken. */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10596
10597
/**
 * Opcode 0x80 - Group 1 Eb,Ib.
 *
 * Byte-sized add/or/adc/sbb/and/sub/xor/cmp with an 8-bit immediate source;
 * the operation is selected by the reg field of the ModR/M byte via the
 * g_apIemImplGrp1 function table.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Select the mnemonic (statistics/logging) from the /r field. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* A NULL locked variant identifies CMP: it only reads the
           destination, so map read-only and reject the LOCK prefix. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The immediate byte follows the displacement, hence the fetch
           after the effective address calculation (cbImm = 1). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10667
10668
/**
 * Opcode 0x81 - Group 1 Ev,Iz.
 *
 * Word/dword/qword add/or/adc/sbb/and/sub/xor/cmp with an operand-sized
 * immediate (Iz: 16-bit or 32-bit; sign-extended to 64-bit in 64-bit mode).
 * The operation is selected by the reg field of the ModR/M byte via the
 * g_apIemImplGrp1 function table.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Select the mnemonic (statistics/logging) from the /r field. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                /* NULL locked variant identifies CMP: read-only mapping, LOCK rejected. */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The immediate follows the displacement (cbImm = 2). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the high half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The immediate follows the displacement (cbImm = 4). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* Iz is a 32-bit immediate sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Iz is still only 4 bytes in the instruction stream (cbImm = 4),
                   sign-extended to 64 bits when fetched. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
10856
10857
/**
 * Opcode 0x82 - undocumented alias of opcode 0x80 (Group 1 Eb,Ib).
 *
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT raises \#UD); otherwise it
 * simply forwards to the 0x80 decoder.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
10864
10865
/**
 * Opcode 0x83 - Group 1 Ev,Ib.
 *
 * Word/dword/qword add/or/adc/sbb/and/sub/xor/cmp with an 8-bit immediate
 * that is sign-extended to the effective operand size.  The operation is
 * selected by the reg field of the ModR/M byte via g_apIemImplGrp1.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Select the mnemonic (statistics/logging) from the /r field. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* (int8_t) cast sign-extends the immediate to operand size. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the high half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* NULL locked variant identifies CMP: read-only mapping, LOCK
           rejected.  The U16 entry is representative for all widths. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The immediate byte follows the displacement (cbImm = 1). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
11048
11049
/**
 * Opcode 0x84 - test Eb,Gb.
 *
 * Byte AND without writing the destination; AF is left undefined (declared
 * to the verifier), and the work is done by the generic byte r/m,reg helper
 * with the TEST implementation table.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
11057
11058
/**
 * Opcode 0x85 - test Ev,Gv.
 *
 * Word/dword/qword AND without writing the destination; AF is left
 * undefined.  Defers to the generic operand-sized r/m,reg helper with the
 * TEST implementation table.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
11066
11067
/**
 * Opcode 0x86 - xchg Eb,Gb.
 *
 * Byte exchange.  The register form swaps via two fetches and two stores;
 * the memory form maps the destination read/write and calls the
 * iemAImpl_xchg_u8 worker.  NOTE(review): the memory form presumably relies
 * on the worker/mapping for XCHG's implicit-lock semantics - not visible
 * from this function.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11115
11116
/**
 * Opcode 0x87 - xchg Ev,Gv.
 *
 * Word/dword/qword exchange.  Register forms swap via fetch/fetch/store/store;
 * memory forms map the destination read/write and call the width-specific
 * iemAImpl_xchg_uNN worker.  In the 32-bit memory form the register's high
 * dword is explicitly cleared after the exchange (64-bit mode semantics).
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* Zero the high half of the register (64-bit mode rule). */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11238
11239
/**
 * Opcode 0x88 - mov Eb,Gb.
 *
 * Store a byte register into a byte register or memory destination.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
11279
11280
/**
 * Opcode 0x89 - mov Ev,Gv.
 *
 * Store a word/dword/qword register into a register or memory destination,
 * selected by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
11370
11371
/**
 * Opcode 0x8a - mov Gb,Eb.
 *
 * Load a byte register from a byte register or memory source.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11409
11410
/**
 * Opcode 0x8b - mov Gv,Ev.
 *
 * Load a word/dword/qword register from a register or memory source,
 * selected by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
11500
11501
11502/** Opcode 0x63. */
11503FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
11504{
11505 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
11506 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
11507 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11508 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
11509 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
11510}
11511
11512
/**
 * Opcode 0x8c - mov Ev,Sw.
 *
 * Stores a segment register into a general register or a memory location.
 * The memory form is always word sized; the register form respects the
 * effective operand size, zero-extending into the wider register.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (iSegReg > X86_SREG_GS) /* reg fields 6 and 7 have no segment register -> #UD */
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11586
11587
11588
11589
/**
 * Opcode 0x8d - lea Gv,M.
 *
 * Stores the effective address of the memory operand in the 'reg' general
 * register; no memory is accessed.  The register form (mod=3) raises \#UD.
 * For 16/32-bit operand sizes the address is truncated to the operand size.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate address to 16 bits */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate address to 32 bits */
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
11636
11637
/**
 * Opcode 0x8e - mov Sw,Ev.
 *
 * Loads a segment register from a word-sized register or memory operand.
 * Loading CS, or using reg fields 6/7, raises \#UD.  The actual segment
 * load (descriptor fetch, checks) is deferred to iemCImpl_load_SReg.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11692
11693
/**
 * Opcode 0x8f /0 - pop Ev.
 *
 * Pops a word/dword/qword off the stack into a register or memory operand.
 * The register form shares iemOpCommonPopGReg; the memory form is handled
 * inline (interpreter only) because Intel specifies that rSP is incremented
 * before it is used in the effective address calculation.
 *
 * @param bRm The ModR/M byte, already fetched by the caller (iemOp_Grp1A).
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    /* The last argument is the operand size, i.e. the rSP displacement to apply. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop via a temporary rSP so nothing is committed unless the store succeeds. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Everything worked: commit the new rSP and advance RIP. */
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
11788
11789
11790/** Opcode 0x8f. */
11791FNIEMOP_DEF(iemOp_Grp1A)
11792{
11793 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11794 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
11795 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
11796
11797 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
11798 /** @todo XOP decoding. */
11799 IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
11800 return IEMOP_RAISE_INVALID_OPCODE();
11801}
11802
11803
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Exchanges rAX with the general register given by @a iReg, at the current
 * effective operand size.  REX.B is applied to @a iReg here, which is how
 * 'xchg r8,rAX' (REX.B + 0x90) is reached.
 *
 * @param iReg  The base (non-REX) index of the register to exchange with rAX.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11853
11854
/**
 * Opcode 0x90 - nop / pause / xchg r8,rAX.
 *
 * With REX.B the encoding becomes a real 'xchg r8,rAX'; with an F3 prefix
 * it is 'pause' (treated as a plain nop here); otherwise it is 'nop'.
 */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
        IEMOP_MNEMONIC(pause, "pause");
    else
        IEMOP_MNEMONIC(nop, "nop");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11874
11875
/** Opcode 0x91 - xchg rCX,rAX (REX.B selects r9). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
11882
11883
/** Opcode 0x92 - xchg rDX,rAX (REX.B selects r10). */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
11890
11891
/** Opcode 0x93 - xchg rBX,rAX (REX.B selects r11). */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
11898
11899
11900/** Opcode 0x94. */
11901FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
11902{
11903 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
11904 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
11905}
11906
11907
/** Opcode 0x95 - xchg rBP,rAX (REX.B selects r13). */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
11914
11915
/** Opcode 0x96 - xchg rSI,rAX (REX.B selects r14). */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
11922
11923
/** Opcode 0x97 - xchg rDI,rAX (REX.B selects r15). */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
11930
11931
/**
 * Opcode 0x98 - cbw / cwde / cdqe.
 *
 * Sign-extends the lower half of rAX into the full effective-operand-size
 * register, implemented by testing the sign bit and OR-ing/AND-ing masks.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) { /* AL sign bit */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* AX sign bit */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* EAX sign bit */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11977
11978
/**
 * Opcode 0x99 - cwd / cdq / cqo.
 *
 * Fills rDX with the sign of rAX at the effective operand size (all ones
 * if the rAX sign bit is set, zero otherwise).
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* AX sign bit */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* EAX sign bit */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) { /* RAX sign bit */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12024
12025
/**
 * Opcode 0x9a - call Ap (far call with immediate ptr16:16 / ptr16:32).
 *
 * Invalid in 64-bit mode.  Decodes the offset (16 or 32 bits per effective
 * operand size) and selector, then defers to iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
12042
12043
/**
 * Opcode 0x9b - wait (aka fwait).
 *
 * Checks for pending FPU exceptions (\#NM via CR0.MP+TS, \#MF) and is
 * otherwise a no-op.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12057
12058
/** Opcode 0x9c - pushf Fv.  Defers to iemCImpl_pushf; 64-bit default operand size applies. */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
12066
12067
/** Opcode 0x9d - popf Fv.  Defers to iemCImpl_popf; 64-bit default operand size applies. */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
12075
12076
/**
 * Opcode 0x9e - sahf.
 *
 * Loads SF, ZF, AF, PF and CF from AH into EFLAGS, forcing the reserved
 * bit 1 set and leaving all other flag bits untouched.  \#UD in 64-bit
 * mode unless the CPU reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/); /* AH = high byte of reg 4 w/o REX */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00)); /* keep the non-AH flag bits */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);           /* reserved bit 1 always set */
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12099
12100
/**
 * Opcode 0x9f - lahf.
 *
 * Stores the low EFLAGS byte (SF/ZF/AF/PF/CF plus the fixed bits) into AH.
 * \#UD in 64-bit mode unless the CPU reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags); /* AH = high byte of reg 4 w/o REX */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12117
12118
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
 * prefixes.  Will return on failures.
 *
 * The moffs immediate is 2/4/8 bytes wide depending on the effective
 * address size, and is zero-extended to 64 bits.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
12143
12144/** Opcode 0xa0. */
12145FNIEMOP_DEF(iemOp_mov_Al_Ob)
12146{
12147 /*
12148 * Get the offset and fend of lock prefixes.
12149 */
12150 RTGCPTR GCPtrMemOff;
12151 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12152
12153 /*
12154 * Fetch AL.
12155 */
12156 IEM_MC_BEGIN(0,1);
12157 IEM_MC_LOCAL(uint8_t, u8Tmp);
12158 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12159 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
12160 IEM_MC_ADVANCE_RIP();
12161 IEM_MC_END();
12162 return VINF_SUCCESS;
12163}
12164
12165
/**
 * Opcode 0xa1 - mov rAX,Ov.
 *
 * Loads rAX from the moffs address in the current effective segment at the
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12211
12212
12213/** Opcode 0xa2. */
12214FNIEMOP_DEF(iemOp_mov_Ob_AL)
12215{
12216 /*
12217 * Get the offset and fend of lock prefixes.
12218 */
12219 RTGCPTR GCPtrMemOff;
12220 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12221
12222 /*
12223 * Store AL.
12224 */
12225 IEM_MC_BEGIN(0,1);
12226 IEM_MC_LOCAL(uint8_t, u8Tmp);
12227 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
12228 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
12229 IEM_MC_ADVANCE_RIP();
12230 IEM_MC_END();
12231 return VINF_SUCCESS;
12232}
12233
12234
12235/** Opcode 0xa3. */
12236FNIEMOP_DEF(iemOp_mov_Ov_rAX)
12237{
12238 /*
12239 * Get the offset and fend of lock prefixes.
12240 */
12241 RTGCPTR GCPtrMemOff;
12242 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12243
12244 /*
12245 * Store rAX.
12246 */
12247 switch (pVCpu->iem.s.enmEffOpSize)
12248 {
12249 case IEMMODE_16BIT:
12250 IEM_MC_BEGIN(0,1);
12251 IEM_MC_LOCAL(uint16_t, u16Tmp);
12252 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
12253 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
12254 IEM_MC_ADVANCE_RIP();
12255 IEM_MC_END();
12256 return VINF_SUCCESS;
12257
12258 case IEMMODE_32BIT:
12259 IEM_MC_BEGIN(0,1);
12260 IEM_MC_LOCAL(uint32_t, u32Tmp);
12261 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
12262 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
12263 IEM_MC_ADVANCE_RIP();
12264 IEM_MC_END();
12265 return VINF_SUCCESS;
12266
12267 case IEMMODE_64BIT:
12268 IEM_MC_BEGIN(0,1);
12269 IEM_MC_LOCAL(uint64_t, u64Tmp);
12270 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
12271 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
12272 IEM_MC_ADVANCE_RIP();
12273 IEM_MC_END();
12274 return VINF_SUCCESS;
12275
12276 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12277 }
12278}
12279
/**
 * Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-REP movs step: load from [iEffSeg:rSI], store to [ES:rDI],
 * then advance (or, with EFLAGS.DF set, retreat) rSI/rDI by the element
 * size.  ValBits is the element width, AddrBits the address size.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
12298
/**
 * Opcode 0xa4 - movsb Xb,Yb.
 *
 * Byte string move; a REP/REPNE prefix defers to the C implementation,
 * otherwise a single step is emitted via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12332
12333
12334/** Opcode 0xa5. */
12335FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
12336{
12337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12338
12339 /*
12340 * Use the C implementation if a repeat prefix is encountered.
12341 */
12342 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12343 {
12344 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
12345 switch (pVCpu->iem.s.enmEffOpSize)
12346 {
12347 case IEMMODE_16BIT:
12348 switch (pVCpu->iem.s.enmEffAddrMode)
12349 {
12350 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
12351 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
12352 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
12353 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12354 }
12355 break;
12356 case IEMMODE_32BIT:
12357 switch (pVCpu->iem.s.enmEffAddrMode)
12358 {
12359 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
12360 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
12361 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
12362 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12363 }
12364 case IEMMODE_64BIT:
12365 switch (pVCpu->iem.s.enmEffAddrMode)
12366 {
12367 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
12368 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
12369 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
12370 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12371 }
12372 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12373 }
12374 }
12375 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
12376
12377 /*
12378 * Annoying double switch here.
12379 * Using ugly macro for implementing the cases, sharing it with movsb.
12380 */
12381 switch (pVCpu->iem.s.enmEffOpSize)
12382 {
12383 case IEMMODE_16BIT:
12384 switch (pVCpu->iem.s.enmEffAddrMode)
12385 {
12386 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
12387 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
12388 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
12389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12390 }
12391 break;
12392
12393 case IEMMODE_32BIT:
12394 switch (pVCpu->iem.s.enmEffAddrMode)
12395 {
12396 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
12397 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
12398 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
12399 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12400 }
12401 break;
12402
12403 case IEMMODE_64BIT:
12404 switch (pVCpu->iem.s.enmEffAddrMode)
12405 {
12406 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12407 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
12408 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
12409 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12410 }
12411 break;
12412 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12413 }
12414 return VINF_SUCCESS;
12415}
12416
12417#undef IEM_MOVS_CASE
12418
/**
 * Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-REP cmps step: load [iEffSeg:rSI] and [ES:rDI], compare
 * them with iemAImpl_cmp_uNN (updating EFLAGS only), then advance (or,
 * with EFLAGS.DF set, retreat) rSI/rDI by the element size.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
12445
/**
 * Opcode 0xa6 - cmpsb.
 *
 * Compares the byte at iEffSeg:xSI with the byte at ES:xDI, updating EFLAGS
 * and stepping both index registers by DF.  With a REPE/REPNE prefix the
 * whole (conditionally) repeated operation is deferred to a C implementation.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
12491
12492
/**
 * Opcode 0xa7 - cmpsw/cmpsd/cmpsq.
 *
 * Like cmpsb but for the current effective operand size.  REPE/REPNE forms
 * are deferred to C implementations selected by operand and address size.
 * Note: 16-bit addressing cannot be encoded together with a 64-bit operand
 * size, hence the AssertFailedReturn for that combination.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case of the inner switch returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case of the inner switch returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
12611
12612#undef IEM_CMPS_CASE
12613
/**
 * Opcode 0xa8 - test AL,Ib.
 *
 * ANDs AL with the immediate byte, setting flags only; AF is architecturally
 * undefined after TEST, hence the verification exemption.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
12621
12622
/**
 * Opcode 0xa9 - test rAX,Iz.
 *
 * Operand-size-dependent TEST of AX/EAX/RAX against an immediate; flags only.
 * AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
12630
12631
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the micro-ops for one non-repeated STOS iteration: stores the low
 * ValBits of xAX at ES:xDI, then advances or rewinds xDI by ValBits/8
 * according to EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
12647
/**
 * Opcode 0xaa - stosb.
 *
 * Stores AL at ES:xDI and steps xDI by DF.  A REP prefix (either 0xf2 or
 * 0xf3 - both mean plain REP for STOS) defers to the C implementation.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12681
12682
/**
 * Opcode 0xab - stosw/stosd/stosq.
 *
 * Like stosb but for the current effective operand size.  REP forms are
 * deferred to C implementations selected by operand and address size; the
 * 64-bit operand / 16-bit address combination cannot be encoded.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case of the inner switch returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9); /* cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12765
12766#undef IEM_STOS_CASE
12767
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the micro-ops for one non-repeated LODS iteration: loads the
 * ValBits-wide value at iEffSeg:xSI into the low ValBits of xAX, then
 * advances or rewinds xSI by ValBits/8 according to EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
12783
/**
 * Opcode 0xac - lodsb.
 *
 * Loads AL from iEffSeg:xSI and steps xSI by DF.  A REP prefix (0xf2 or
 * 0xf3 - both mean plain REP for LODS) defers to the C implementation.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12817
12818
/**
 * Opcode 0xad - lodsw/lodsd/lodsq.
 *
 * Like lodsb but for the current effective operand size.  REP forms are
 * deferred to C implementations selected by operand and address size; the
 * 64-bit operand / 16-bit address combination cannot be encoded.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case of the inner switch returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12901
12902#undef IEM_LODS_CASE
12903
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the micro-ops for one non-repeated SCAS iteration: compares the low
 * ValBits of xAX against the value at ES:xDI (flags only; xAX unchanged),
 * then advances or rewinds xDI by ValBits/8 according to EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
12925
12926/** Opcode 0xae. */
12927FNIEMOP_DEF(iemOp_scasb_AL_Xb)
12928{
12929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12930
12931 /*
12932 * Use the C implementation if a repeat prefix is encountered.
12933 */
12934 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12935 {
12936 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
12937 switch (pVCpu->iem.s.enmEffAddrMode)
12938 {
12939 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
12940 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
12941 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
12942 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12943 }
12944 }
12945 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12946 {
12947 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
12948 switch (pVCpu->iem.s.enmEffAddrMode)
12949 {
12950 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
12951 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
12952 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
12953 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12954 }
12955 }
12956 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
12957
12958 /*
12959 * Sharing case implementation with stos[wdq] below.
12960 */
12961 switch (pVCpu->iem.s.enmEffAddrMode)
12962 {
12963 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
12964 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
12965 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
12966 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12967 }
12968 return VINF_SUCCESS;
12969}
12970
12971
/**
 * Opcode 0xaf - scasw/scasd/scasq.
 *
 * Like scasb but for the current effective operand size.  REPE/REPNE forms
 * are deferred to C implementations selected by operand and address size;
 * the 64-bit operand / 16-bit address combination cannot be encoded.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case of the inner switch returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo verify: REX.W + 0x67 yields 32-bit addressing; 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case of the inner switch returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
13087
13088#undef IEM_SCAS_CASE
13089
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the immediate byte and stores it in the 8-bit general register
 * selected by @a iReg (index already includes any REX.B extension; the U8
 * accessor is presumed to handle the AH/CH/DH/BH vs SPL/BPL/SIL/DIL mapping
 * for indices 4-7 - not visible here, verify in IEM_MC_STORE_GREG_U8).
 *
 * @param   iReg    Destination general register index.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13106
13107
/** Opcode 0xb0 - mov AL,Ib (REX.B selects R8B). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
13114
13115
/** Opcode 0xb1 - mov CL,Ib (REX.B selects R9B).
 * @note Function name lacks the mov_ prefix used by iemOp_mov_AL_Ib; kept
 *       as-is since the opcode dispatch table references it by this name. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
13122
13123
/** Opcode 0xb2 - mov DL,Ib (REX.B selects R10B). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
13130
13131
/** Opcode 0xb3 - mov BL,Ib (REX.B selects R11B). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
13138
13139
/** Opcode 0xb4 - mov AH,Ib.
 * Register index 4: AH without a REX prefix, SPL/R12B with one - presumably
 * resolved inside the U8 register accessor (not visible here; verify). */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
13146
13147
/** Opcode 0xb5 - mov CH,Ib (index 5: CH, or BPL/R13B with REX). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
13154
13155
/** Opcode 0xb6 - mov DH,Ib (index 6: DH, or SIL/R14B with REX). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
13162
13163
/** Opcode 0xb7 - mov BH,Ib (index 7: BH, or DIL/R15B with REX). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
13170
13171
13172/**
13173 * Common 'mov regX,immX' helper.
13174 */
13175FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
13176{
13177 switch (pVCpu->iem.s.enmEffOpSize)
13178 {
13179 case IEMMODE_16BIT:
13180 {
13181 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13183
13184 IEM_MC_BEGIN(0, 1);
13185 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
13186 IEM_MC_STORE_GREG_U16(iReg, u16Value);
13187 IEM_MC_ADVANCE_RIP();
13188 IEM_MC_END();
13189 break;
13190 }
13191
13192 case IEMMODE_32BIT:
13193 {
13194 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13196
13197 IEM_MC_BEGIN(0, 1);
13198 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
13199 IEM_MC_STORE_GREG_U32(iReg, u32Value);
13200 IEM_MC_ADVANCE_RIP();
13201 IEM_MC_END();
13202 break;
13203 }
13204 case IEMMODE_64BIT:
13205 {
13206 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
13207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13208
13209 IEM_MC_BEGIN(0, 1);
13210 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
13211 IEM_MC_STORE_GREG_U64(iReg, u64Value);
13212 IEM_MC_ADVANCE_RIP();
13213 IEM_MC_END();
13214 break;
13215 }
13216 }
13217
13218 return VINF_SUCCESS;
13219}
13220
13221
/** Opcode 0xb8 - mov rAX,Iv (REX.B selects R8). */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
13228
13229
/** Opcode 0xb9 - mov rCX,Iv (REX.B selects R9). */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
13236
13237
/** Opcode 0xba - mov rDX,Iv (REX.B selects R10). */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
13244
13245
/** Opcode 0xbb - mov rBX,Iv (REX.B selects R11). */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
13252
13253
/** Opcode 0xbc - mov rSP,Iv (REX.B selects R12). */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
13260
13261
/** Opcode 0xbd - mov rBP,Iv (REX.B selects R13). */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
13268
13269
/** Opcode 0xbe - mov rSI,Iv (REX.B selects R14). */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
13276
13277
/** Opcode 0xbf - mov rDI,Iv (REX.B selects R15). */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
13284
13285
/**
 * Opcode 0xc0 - Group 2: rol/ror/rcl/rcr/shl/shr/sar Eb,Ib.
 *
 * The operation is selected by the ModR/M reg field; /6 is undefined and
 * raises \#UD.  Requires an 80186 or later (immediate-count shift group).
 * OF and AF are architecturally undefined for multi-bit shifts/rotates.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Pick the worker from the reg field of the ModR/M byte. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Note: the immediate byte follows the ModR/M operand bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13345
13346
/**
 * Opcode 0xc1 - Group 2: rotate/shift Ev by an immediate byte count.
 *
 * The ModR/M reg field selects the operation (rol/ror/rcl/rcr/shl/shr/sar);
 * the /6 encoding is unassigned and raises \#UD.  Requires an 80186 or later.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is unassigned. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left in an undefined state by shifts/rotates. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The trailing 1 tells the addressing code an Ib follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13486
13487
/**
 * Opcode 0xc2 - retn Iw: near return, popping Iw extra bytes off the stack.
 *
 * Defers to iemCImpl_retn; the operand size defaults to 64-bit in long mode.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
13497
13498
/**
 * Opcode 0xc3 - retn: plain near return (zero extra pop bytes).
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
13507
13508
/**
 * Opcode 0xc4 - les Gv,Mp, or the 2-byte VEX prefix.
 *
 * In 64-bit mode, or with MOD=3 in legacy/compat mode, 0xc4 is the VEX2
 * prefix; otherwise it is the legacy LES instruction.  VEX decoding is not
 * implemented here yet, so the VEX path currently raises \#UD.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
        /* The LES instruction is invalid 64-bit mode. In legacy and
           compatibility mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
13529
13530
/**
 * Opcode 0xc5 - lds Gv,Mp, or the 3-byte VEX prefix.
 *
 * Legacy/compat mode with MOD!=3 decodes as LDS; everything else is the VEX3
 * prefix.  VEX is unavailable in real and v8086 mode.  The VEX bytes are
 * consumed so the opcode stream stays in sync, then \#UD is raised because
 * VEX decoding is not implemented here yet.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
    /** @todo Test when exactly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
13568
13569
/**
 * Opcode 0xc6 - Group 11: mov Eb,Ib.
 *
 * Only the /0 encoding is defined in this group; all other reg values
 * raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* The trailing 1 accounts for the Ib following the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13602
13603
/**
 * Opcode 0xc7 - Group 11: mov Ev,Iz.
 *
 * Only the /0 encoding is defined in this group; all other reg values raise
 * \#UD.  In 64-bit operand size the Iz immediate is a sign-extended 32-bit
 * value, per the usual mov r/m64,imm32 encoding.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* The trailing 2/4 below is the size of the Iz immediate that
                   follows the ModR/M bytes, needed for RIP-relative fixups. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13689
13690
13691
13692
/**
 * Opcode 0xc8 - enter Iw,Ib: create a stack frame of Iw bytes with a
 * nesting level of Ib.  80186+.  Defers to iemCImpl_enter.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
13704
13705
/**
 * Opcode 0xc9 - leave: tear down the current stack frame (mov xSP,xBP;
 * pop xBP).  80186+.  Defers to iemCImpl_leave.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
13715
13716
/**
 * Opcode 0xca - retf Iw: far return, popping Iw extra bytes off the stack.
 * Defers to iemCImpl_retf.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
13726
13727
/**
 * Opcode 0xcb - retf: plain far return (zero extra pop bytes).
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
13736
13737
/**
 * Opcode 0xcc - int3: breakpoint, raising \#BP with the breakpoint-instruction
 * flag set (distinguishes it from 'int 3' for permission checks).
 */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
13744
13745
/**
 * Opcode 0xcd - int Ib: software interrupt with an arbitrary vector.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
13753
13754
/**
 * Opcode 0xce - into: raise \#OF if EFLAGS.OF is set (the conditional check
 * happens inside iemCImpl_int).  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();
    /* NOTE(review): unlike the sibling int/int3 handlers this one does not
       call IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX - confirm the lock-prefix
       check is intentionally omitted here. */

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13768
13769
/**
 * Opcode 0xcf - iret: interrupt return, sized by the effective operand size.
 * Defers to iemCImpl_iret.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
13777
13778
/**
 * Opcode 0xd0 - Group 2: rotate/shift Eb by a constant count of 1.
 *
 * The ModR/M reg field selects the operation; /6 is unassigned and
 * raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is unassigned. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left in an undefined state by shifts/rotates. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13834
13835
13836
/**
 * Opcode 0xd1 - Group 2: rotate/shift Ev by a constant count of 1.
 *
 * The ModR/M reg field selects the operation; /6 is unassigned and
 * raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is unassigned. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left in an undefined state by shifts/rotates. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13968
13969
/**
 * Opcode 0xd2 - Group 2: rotate/shift Eb by the count in CL.
 *
 * The ModR/M reg field selects the operation; /6 is unassigned and
 * raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is unassigned. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are left in an undefined state by shifts/rotates. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
14027
14028
/**
 * Opcode 0xd3 - Group 2: rotate/shift Ev by the count in CL.
 *
 * The ModR/M reg field selects the operation; /6 is unassigned and
 * raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is unassigned. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left in an undefined state by shifts/rotates. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14166
/**
 * Opcode 0xd4 - aam Ib: ASCII adjust AX after multiply, with an arbitrary
 * base in Ib.  A zero base raises \#DE.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* aam 0 divides by zero. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
14178
14179
/**
 * Opcode 0xd5 - aad Ib: ASCII adjust AX before division, with an arbitrary
 * base in Ib.  Invalid in 64-bit mode.  (No \#DE here - aad multiplies.)
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
14189
14190
14191/** Opcode 0xd6. */
14192FNIEMOP_DEF(iemOp_salc)
14193{
14194 IEMOP_MNEMONIC(salc, "salc");
14195 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
14196 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14198 IEMOP_HLP_NO_64BIT();
14199
14200 IEM_MC_BEGIN(0, 0);
14201 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
14202 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
14203 } IEM_MC_ELSE() {
14204 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
14205 } IEM_MC_ENDIF();
14206 IEM_MC_ADVANCE_RIP();
14207 IEM_MC_END();
14208 return VINF_SUCCESS;
14209}
14210
14211
/**
 * Opcode 0xd7 - xlat: AL = [xBX + AL], using the effective address size and
 * the effective segment (DS unless overridden).
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            /* Zero-extend AL, add BX, then fetch using the 16-bit address form. */
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14258
14259
14260/**
14261 * Common worker for FPU instructions working on ST0 and STn, and storing the
14262 * result in ST0.
14263 *
14264 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14265 */
14266FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14267{
14268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14269
14270 IEM_MC_BEGIN(3, 1);
14271 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14272 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14273 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14274 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14275
14276 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14277 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14278 IEM_MC_PREPARE_FPU_USAGE();
14279 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14280 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14281 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14282 IEM_MC_ELSE()
14283 IEM_MC_FPU_STACK_UNDERFLOW(0);
14284 IEM_MC_ENDIF();
14285 IEM_MC_ADVANCE_RIP();
14286
14287 IEM_MC_END();
14288 return VINF_SUCCESS;
14289}
14290
14291
14292/**
14293 * Common worker for FPU instructions working on ST0 and STn, and only affecting
14294 * flags.
14295 *
14296 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14297 */
14298FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14299{
14300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14301
14302 IEM_MC_BEGIN(3, 1);
14303 IEM_MC_LOCAL(uint16_t, u16Fsw);
14304 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14305 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14306 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14307
14308 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14309 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14310 IEM_MC_PREPARE_FPU_USAGE();
14311 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14312 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14313 IEM_MC_UPDATE_FSW(u16Fsw);
14314 IEM_MC_ELSE()
14315 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14316 IEM_MC_ENDIF();
14317 IEM_MC_ADVANCE_RIP();
14318
14319 IEM_MC_END();
14320 return VINF_SUCCESS;
14321}
14322
14323
14324/**
14325 * Common worker for FPU instructions working on ST0 and STn, only affecting
14326 * flags, and popping when done.
14327 *
14328 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14329 */
14330FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14331{
14332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14333
14334 IEM_MC_BEGIN(3, 1);
14335 IEM_MC_LOCAL(uint16_t, u16Fsw);
14336 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14337 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14338 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14339
14340 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14341 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14342 IEM_MC_PREPARE_FPU_USAGE();
14343 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14344 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14345 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14346 IEM_MC_ELSE()
14347 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
14348 IEM_MC_ENDIF();
14349 IEM_MC_ADVANCE_RIP();
14350
14351 IEM_MC_END();
14352 return VINF_SUCCESS;
14353}
14354
14355
/** Opcode 0xd8 11/0.
 * FADD ST(0),ST(i): ST(0) = ST(0) + ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
14362
14363
/** Opcode 0xd8 11/1.
 * FMUL ST(0),ST(i): ST(0) = ST(0) * ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
14370
14371
/** Opcode 0xd8 11/2.
 * FCOM ST(0),ST(i): compare, updating only the FSW condition codes. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
14378
14379
/** Opcode 0xd8 11/3.
 * FCOMP ST(0),ST(i): as FCOM but pops the stack afterwards (same
 * assembly worker, different MC wrapper). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
14386
14387
/** Opcode 0xd8 11/4.
 * FSUB ST(0),ST(i): ST(0) = ST(0) - ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
14394
14395
/** Opcode 0xd8 11/5.
 * FSUBR ST(0),ST(i): reversed operands, ST(0) = ST(i) - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
14402
14403
/** Opcode 0xd8 11/6.
 * FDIV ST(0),ST(i): ST(0) = ST(0) / ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
14410
14411
/** Opcode 0xd8 11/7.
 * FDIVR ST(0),ST(i): reversed operands, ST(0) = ST(i) / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
14418
14419
14420/**
14421 * Common worker for FPU instructions working on ST0 and an m32r, and storing
14422 * the result in ST0.
14423 *
14424 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14425 */
14426FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
14427{
14428 IEM_MC_BEGIN(3, 3);
14429 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14430 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14431 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14432 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14433 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14434 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14435
14436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14438
14439 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14440 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14441 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14442
14443 IEM_MC_PREPARE_FPU_USAGE();
14444 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14445 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
14446 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14447 IEM_MC_ELSE()
14448 IEM_MC_FPU_STACK_UNDERFLOW(0);
14449 IEM_MC_ENDIF();
14450 IEM_MC_ADVANCE_RIP();
14451
14452 IEM_MC_END();
14453 return VINF_SUCCESS;
14454}
14455
14456
/** Opcode 0xd8 !11/0.
 * FADD ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
14463
14464
/** Opcode 0xd8 !11/1.
 * FMUL ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
14471
14472
/** Opcode 0xd8 !11/2.
 * FCOM ST(0),m32real: compare against a 32-bit real from memory;
 * only FSW is updated, FPUDP/FPUDS record the memory operand. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14505
14506
/** Opcode 0xd8 !11/3.
 * FCOMP ST(0),m32real: as FCOM m32r but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14539
14540
/** Opcode 0xd8 !11/4.
 * FSUB ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
14547
14548
/** Opcode 0xd8 !11/5.
 * FSUBR ST(0),m32real: ST(0) = m32real - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
14555
14556
/** Opcode 0xd8 !11/6.
 * FDIV ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
14563
14564
/** Opcode 0xd8 !11/7.
 * FDIVR ST(0),m32real: ST(0) = m32real / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
14571
14572
/** Opcode 0xd8.
 * First x87 escape byte: dispatches on the ModR/M reg field, with the
 * register forms (mod==3) operating on ST(0),ST(i) and the memory forms
 * taking an m32real operand.  Also records the 11-bit FPU opcode
 * (for FOP) before dispatching. */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14610
14611
/** Opcode 0xd9 /0 mem32real
 * FLD m32real: converts the 32-bit real to 80-bit and pushes it.
 * A push requires the register that becomes the new top (ST(7) relative
 * to the current top) to be empty, else stack overflow is signalled.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14644
14645
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real: stores ST(0) to memory as a 32-bit real.  On stack
 * underflow, a negative QNaN is written instead when the invalid-op
 * exception is masked (FCW.IM set). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing so memory faults happen before state change. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14680
14681
/** Opcode 0xd9 !11/3
 * FSTP m32real: as FST m32r but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow: write negative QNaN if IM is masked, then still pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14716
14717
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte: loads the FPU environment (control/status/tag words,
 * instruction and data pointers); the image size depends on operand size,
 * so the effective operand size is passed to the C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
14735
14736
14737/** Opcode 0xd9 !11/5 */
14738FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
14739{
14740 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
14741 IEM_MC_BEGIN(1, 1);
14742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14743 IEM_MC_ARG(uint16_t, u16Fsw, 0);
14744 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14746 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14747 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14748 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14749 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
14750 IEM_MC_END();
14751 return VINF_SUCCESS;
14752}
14753
14754
14755/** Opcode 0xd9 !11/6 */
14756FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
14757{
14758 IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
14759 IEM_MC_BEGIN(3, 0);
14760 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14761 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14762 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
14763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14765 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14766 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14767 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14768 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
14769 IEM_MC_END();
14770 return VINF_SUCCESS;
14771}
14772
14773
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte: stores the FPU control word to memory; the no-wait
 * form, so no pending-exception check is made. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
14791
14792
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: no arithmetic effect, but still checks for CR0.TS/EM and pending
 * FPU exceptions, and updates FOP/FPUIP like other x87 instructions. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
14810
14811
/** Opcode 0xd9 11/0 stN
 * FLD ST(i): pushes a copy of ST(i) onto the stack.  Pushing an empty
 * register signals stack underflow rather than overflow. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
14839
14840
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i): exchanges ST(0) and ST(i), clearing C1.  If either
 * register is empty the underflow case is handled in a C worker. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: STn's value goes to ST0 (with C1 set in the result FSW),
           ST0's old value is written to STn. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
14871
14872
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i): copies ST(0) into ST(i) and pops.  The ST(0) destination
 * case is special-cased since "fstp st0,st0" is a common 'ffreep st0'
 * idiom that must only pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: nothing to copy, just pop (or signal underflow). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
14919
14920
14921/**
14922 * Common worker for FPU instructions working on ST0 and replaces it with the
14923 * result, i.e. unary operators.
14924 *
14925 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14926 */
14927FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
14928{
14929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14930
14931 IEM_MC_BEGIN(2, 1);
14932 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14933 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14934 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14935
14936 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14937 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14938 IEM_MC_PREPARE_FPU_USAGE();
14939 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14940 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
14941 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14942 IEM_MC_ELSE()
14943 IEM_MC_FPU_STACK_UNDERFLOW(0);
14944 IEM_MC_ENDIF();
14945 IEM_MC_ADVANCE_RIP();
14946
14947 IEM_MC_END();
14948 return VINF_SUCCESS;
14949}
14950
14951
/** Opcode 0xd9 0xe0.
 * FCHS: negates the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
14958
14959
/** Opcode 0xd9 0xe1.
 * FABS: clears the sign of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
14966
14967
14968/**
14969 * Common worker for FPU instructions working on ST0 and only returns FSW.
14970 *
14971 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14972 */
14973FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
14974{
14975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14976
14977 IEM_MC_BEGIN(2, 1);
14978 IEM_MC_LOCAL(uint16_t, u16Fsw);
14979 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14980 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14981
14982 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14983 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14984 IEM_MC_PREPARE_FPU_USAGE();
14985 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14986 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
14987 IEM_MC_UPDATE_FSW(u16Fsw);
14988 IEM_MC_ELSE()
14989 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14990 IEM_MC_ENDIF();
14991 IEM_MC_ADVANCE_RIP();
14992
14993 IEM_MC_END();
14994 return VINF_SUCCESS;
14995}
14996
14997
/** Opcode 0xd9 0xe4.
 * FTST: compares ST(0) against +0.0, setting only the FSW condition codes. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
15004
15005
/** Opcode 0xd9 0xe5.
 * FXAM: classifies the value in ST(0) via the FSW condition codes. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
15012
15013
15014/**
15015 * Common worker for FPU instructions pushing a constant onto the FPU stack.
15016 *
15017 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15018 */
15019FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
15020{
15021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15022
15023 IEM_MC_BEGIN(1, 1);
15024 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15025 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15026
15027 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15028 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15029 IEM_MC_PREPARE_FPU_USAGE();
15030 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15031 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
15032 IEM_MC_PUSH_FPU_RESULT(FpuRes);
15033 IEM_MC_ELSE()
15034 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
15035 IEM_MC_ENDIF();
15036 IEM_MC_ADVANCE_RIP();
15037
15038 IEM_MC_END();
15039 return VINF_SUCCESS;
15040}
15041
15042
/** Opcode 0xd9 0xe8.
 * FLD1: pushes +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
15049
15050
/** Opcode 0xd9 0xe9.
 * FLDL2T: pushes log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
15057
15058
/** Opcode 0xd9 0xea.
 * FLDL2E: pushes log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
15065
/** Opcode 0xd9 0xeb.
 * FLDPI: pushes pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
15072
15073
/** Opcode 0xd9 0xec.
 * FLDLG2: pushes log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
15080
/** Opcode 0xd9 0xed.
 * FLDLN2: pushes loge(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
15087
15088
/** Opcode 0xd9 0xee.
 * FLDZ: pushes +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
15095
15096
/** Opcode 0xd9 0xf0.
 * F2XM1: ST(0) = 2^ST(0) - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
15103
15104
15105/**
15106 * Common worker for FPU instructions working on STn and ST0, storing the result
15107 * in STn, and popping the stack unless IE, DE or ZE was raised.
15108 *
15109 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15110 */
15111FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15112{
15113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15114
15115 IEM_MC_BEGIN(3, 1);
15116 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15117 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15118 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15119 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15120
15121 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15122 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15123
15124 IEM_MC_PREPARE_FPU_USAGE();
15125 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15126 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15127 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
15128 IEM_MC_ELSE()
15129 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
15130 IEM_MC_ENDIF();
15131 IEM_MC_ADVANCE_RIP();
15132
15133 IEM_MC_END();
15134 return VINF_SUCCESS;
15135}
15136
15137
15138/** Opcode 0xd9 0xf1. */
15139FNIEMOP_DEF(iemOp_fyl2x)
15140{
15141 IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
15142 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
15143}
15144
15145
15146/**
15147 * Common worker for FPU instructions working on ST0 and having two outputs, one
15148 * replacing ST0 and one pushed onto the stack.
15149 *
15150 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15151 */
15152FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
15153{
15154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15155
15156 IEM_MC_BEGIN(2, 1);
15157 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
15158 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
15159 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
15160
15161 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15162 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15163 IEM_MC_PREPARE_FPU_USAGE();
15164 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15165 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
15166 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
15167 IEM_MC_ELSE()
15168 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
15169 IEM_MC_ENDIF();
15170 IEM_MC_ADVANCE_RIP();
15171
15172 IEM_MC_END();
15173 return VINF_SUCCESS;
15174}
15175
15176
/** Opcode 0xd9 0xf2.
 * FPTAN: replaces ST(0) with its partial tangent and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
15183
15184
/** Opcode 0xd9 0xf3.
 * FPATAN: ST(1) = arctan(ST(1)/ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
15191
15192
/** Opcode 0xd9 0xf4 - fxtract st0.
 * Two-output form: defers to the replace-ST0-and-push worker with the fxtract
 * assembly implementation. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
15199
15200
/** Opcode 0xd9 0xf5 - fprem1 st0,st1.
 * Defers to the common ST(0)/ST(n) worker (n=1), storing the result in ST(0). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
15207
15208
/** Opcode 0xd9 0xf6 - fdecstp.
 * Decrements the FPU stack top pointer (TOP) without touching register
 * contents; FSW is updated with no flags forced set. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0/C2/C3 per the note above */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15231
15232
/** Opcode 0xd9 0xf7 - fincstp.
 * Increments the FPU stack top pointer (TOP) without touching register
 * contents; FSW is updated with no flags forced set. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0/C2/C3 per the note above */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15255
15256
/** Opcode 0xd9 0xf8 - fprem st0,st1.
 * Defers to the common ST(0)/ST(n) worker (n=1), storing the result in ST(0). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
15263
15264
/** Opcode 0xd9 0xf9 - fyl2xp1 st1,st0.
 * Defers to the common ST(n)/ST(0) worker (n=1) that stores into ST(1) and
 * pops, using the fyl2xp1 assembly implementation. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
15271
15272
/** Opcode 0xd9 0xfa - fsqrt st0.
 * Unary ST(0) operation via the common ST(0) worker. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
15279
15280
/** Opcode 0xd9 0xfb - fsincos st0.
 * Two-output form: defers to the replace-ST0-and-push worker with the fsincos
 * assembly implementation. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
15287
15288
/** Opcode 0xd9 0xfc - frndint st0.
 * Unary ST(0) operation via the common ST(0) worker. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
15295
15296
/** Opcode 0xd9 0xfd - fscale st0,st1.
 * Defers to the common ST(0)/ST(n) worker (n=1), storing the result in ST(0). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
15303
15304
/** Opcode 0xd9 0xfe - fsin st0.
 * Unary ST(0) operation via the common ST(0) worker. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
15311
15312
/** Opcode 0xd9 0xff - fcos st0.
 * Unary ST(0) operation via the common ST(0) worker. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
15319
15320
/** Used by iemOp_EscF1 for the register-form (mod=3) opcodes 0xe0..0xff;
 *  indexed by (opcode byte - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
15357
15358
/** Opcode 0xd9 - escape group F1.
 * Decodes the modR/M byte and dispatches: register forms (mod=3) by reg field
 * (with 0xe0..0xff going thru the g_apfnEscF1_E0toFF table), memory forms to
 * the m32r / fldenv / fldcw / fnstenv / fnstcw handlers. */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): modR/M byte + low 3 bits of the escape opcode. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg >= 4 implies bRm is in the 0xe0..0xff range. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15401
15402
/** Opcode 0xda 11/0 - fcmovb st0,stN.
 * Copies ST(i) into ST(0) when CF is set; if either register is empty the
 * stack-underflow path is taken instead. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) /* condition: below */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15429
15430
/** Opcode 0xda 11/1 - fcmove st0,stN.
 * Copies ST(i) into ST(0) when ZF is set; if either register is empty the
 * stack-underflow path is taken instead. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) /* condition: equal */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15457
15458
/** Opcode 0xda 11/2 - fcmovbe st0,stN.
 * Copies ST(i) into ST(0) when CF or ZF is set; if either register is empty
 * the stack-underflow path is taken instead. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) /* condition: below or equal */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15485
15486
/** Opcode 0xda 11/3 - fcmovu st0,stN.
 * Copies ST(i) into ST(0) when PF is set (unordered); if either register is
 * empty the stack-underflow path is taken instead. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) /* condition: unordered */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15513
15514
15515/**
15516 * Common worker for FPU instructions working on ST0 and STn, only affecting
15517 * flags, and popping twice when done.
15518 *
15519 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15520 */
15521FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
15522{
15523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15524
15525 IEM_MC_BEGIN(3, 1);
15526 IEM_MC_LOCAL(uint16_t, u16Fsw);
15527 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15528 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15529 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15530
15531 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15532 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15533
15534 IEM_MC_PREPARE_FPU_USAGE();
15535 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
15536 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
15537 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
15538 IEM_MC_ELSE()
15539 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
15540 IEM_MC_ENDIF();
15541 IEM_MC_ADVANCE_RIP();
15542
15543 IEM_MC_END();
15544 return VINF_SUCCESS;
15545}
15546
15547
/** Opcode 0xda 0xe9 - fucompp st0,stN.
 * Unordered compare of ST(0) with ST(1), popping twice; only flags are
 * affected (shares the fucom assembly worker). */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
15554
15555
15556/**
15557 * Common worker for FPU instructions working on ST0 and an m32i, and storing
15558 * the result in ST0.
15559 *
15560 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15561 */
15562FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
15563{
15564 IEM_MC_BEGIN(3, 3);
15565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15566 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15567 IEM_MC_LOCAL(int32_t, i32Val2);
15568 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15569 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15570 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15571
15572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15574
15575 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15576 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15577 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15578
15579 IEM_MC_PREPARE_FPU_USAGE();
15580 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15581 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
15582 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15583 IEM_MC_ELSE()
15584 IEM_MC_FPU_STACK_UNDERFLOW(0);
15585 IEM_MC_ENDIF();
15586 IEM_MC_ADVANCE_RIP();
15587
15588 IEM_MC_END();
15589 return VINF_SUCCESS;
15590}
15591
15592
/** Opcode 0xda !11/0 - fiadd m32i.
 * ST0 += m32i, via the common ST0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
15599
15600
/** Opcode 0xda !11/1 - fimul m32i.
 * ST0 *= m32i, via the common ST0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
15607
15608
/** Opcode 0xda !11/2 - ficom st0,m32i.
 * Compares ST0 with a 32-bit signed integer in memory; only FSW is updated,
 * no register is written and nothing is popped. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15641
15642
/** Opcode 0xda !11/3 - ficomp st0,m32i.
 * Like ficom m32i but pops ST0 afterwards (shares the ficom assembly worker,
 * differing only in the THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15675
15676
/** Opcode 0xda !11/4 - fisub m32i.
 * ST0 -= m32i, via the common ST0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
15683
15684
/** Opcode 0xda !11/5 - fisubr m32i.
 * Reversed subtract (ST0 = m32i - ST0), via the common ST0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
15691
15692
/** Opcode 0xda !11/6 - fidiv m32i.
 * ST0 /= m32i, via the common ST0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
15699
15700
/** Opcode 0xda !11/7 - fidivr m32i.
 * Reversed divide (ST0 = m32i / ST0), via the common ST0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
15707
15708
/** Opcode 0xda - escape group F2.
 * Register forms (mod=3): FCMOVcc st0,st(i) and FUCOMPP (0xe9 only);
 * memory forms: integer (m32i) arithmetic/compare on ST0. */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): modR/M byte + low 3 bits of the escape opcode. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15748
15749
/** Opcode 0xdb !11/0 - fild m32i.
 * Loads a 32-bit signed integer from memory, converts it to r80 and pushes it
 * onto the FPU stack; the push requires ST(7) to be free, otherwise the
 * push-overflow path is taken. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* the push target must be free */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15781
15782
/** Opcode 0xdb !11/1 - fisttp m32i.
 * Stores ST0 to memory as a 32-bit integer using truncation, then pops.
 * On stack underflow, writes the integer-indefinite value when FCW.IM is set
 * (masked), then takes the underflow-with-pop path. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM() /* masked invalid-op: still store the indefinite value */
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15817
15818
/** Opcode 0xdb !11/2 - fist m32i.
 * Stores ST0 to memory as a 32-bit integer without popping.  On stack
 * underflow, writes the integer-indefinite value when FCW.IM is set (masked),
 * then takes the underflow path. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM() /* masked invalid-op: still store the indefinite value */
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15853
15854
/** Opcode 0xdb !11/3 - fistp m32i.
 * Like fist m32i but pops ST0 afterwards (shares the fist assembly worker,
 * differing only in the THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM() /* masked invalid-op: still store the indefinite value */
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15889
15890
/** Opcode 0xdb !11/5 - fld m80r.
 * Loads an 80-bit real from memory and pushes it onto the FPU stack; the push
 * requires ST(7) to be free, otherwise the push-overflow path is taken. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* the push target must be free */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15922
15923
/** Opcode 0xdb !11/7 - fstp m80r.
 * Stores ST0 to memory as an 80-bit real, then pops.  On stack underflow,
 * writes negative QNaN when FCW.IM is set (masked), then takes the
 * underflow-with-pop path. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM() /* masked invalid-op: still store a QNaN */
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15958
15959
/** Opcode 0xdb 11/0 - fcmovnb st0,stN.
 * Copies ST(i) into ST(0) when CF is clear; if either register is empty the
 * stack-underflow path is taken instead. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) /* condition: not below */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15986
15987
/** Opcode 0xdb 11/1 - fcmovne st0,stN.
 * Copies ST(i) into ST(0) when ZF is clear; if either register is empty the
 * stack-underflow path is taken instead. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) /* condition: not equal */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16014
16015
/** Opcode 0xdb 11/2 - fcmovnbe st0,stN.
 * Copies ST(i) into ST(0) when both CF and ZF are clear; if either register is
 * empty the stack-underflow path is taken instead. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) /* condition: not below or equal */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16042
16043
16044/** Opcode 0xdb 11/3. */
16045FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
16046{
16047 IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
16048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16049
16050 IEM_MC_BEGIN(0, 1);
16051 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
16052
16053 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16054 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16055
16056 IEM_MC_PREPARE_FPU_USAGE();
16057 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16058 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
16059 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16060 IEM_MC_ENDIF();
16061 IEM_MC_UPDATE_FPU_OPCODE_IP();
16062 IEM_MC_ELSE()
16063 IEM_MC_FPU_STACK_UNDERFLOW(0);
16064 IEM_MC_ENDIF();
16065 IEM_MC_ADVANCE_RIP();
16066
16067 IEM_MC_END();
16068 return VINF_SUCCESS;
16069}
16070
16071
/** Opcode 0xdb 0xe0 - fneni.
 * 8087-only interrupt-enable instruction; emulated as a no-op (only the
 * device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16083
16084
/** Opcode 0xdb 0xe1 - fndisi.
 * 8087-only interrupt-disable instruction; emulated as a no-op (only the
 * device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16096
16097
/** Opcode 0xdb 0xe2 - fnclex.
 * Clears the FPU exception bits in FSW without checking for pending
 * exceptions first (the "no-wait" form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16112
16113
/** Opcode 0xdb 0xe3 - fninit.
 * Defers to the C implementation of FINIT with exception checking disabled
 * (the "no-wait" form). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
16121
16122
/** Opcode 0xdb 0xe4 - fnsetpm.
 * 80287-only; emulated as a no-op (only the device-not-available check is
 * performed). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16134
16135
/** Opcode 0xdb 0xe5 - frstpm.
 * 80287XL-only; currently raises \#UD, matching newer CPUs (the ignore-as-nop
 * variant is kept disabled below). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
16151
16152
/** Opcode 0xdb 11/5 - fucomi st0,stN.
 * EFLAGS-setting unordered compare; defers to the shared fcomi/fucomi C
 * implementation without popping. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
16159
16160
/** Opcode 0xdb 11/6 - fcomi st0,stN.
 * EFLAGS-setting ordered compare; defers to the shared fcomi/fucomi C
 * implementation without popping. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
16167
16168
/** Opcode 0xdb - escape group F3.
 * Register forms (mod=3): FCMOVNcc, the fneni..frstpm control group (reg=4,
 * dispatched on the full modR/M byte), and FUCOMI/FCOMI; memory forms:
 * m32i integer load/store and m80r real load/store. */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): modR/M byte + low 3 bits of the escape opcode. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                switch (bRm)
                {
                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6:  return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7:  return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return; keeps compilers quiet */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16218
16219
16220/**
16221 * Common worker for FPU instructions working on STn and ST0, and storing the
16222 * result in STn unless IE, DE or ZE was raised.
16223 *
16224 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16225 */
16226FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
16227{
16228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16229
16230 IEM_MC_BEGIN(3, 1);
16231 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16232 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16233 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16234 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
16235
16236 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16237 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16238
16239 IEM_MC_PREPARE_FPU_USAGE();
16240 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
16241 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
16242 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
16243 IEM_MC_ELSE()
16244 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
16245 IEM_MC_ENDIF();
16246 IEM_MC_ADVANCE_RIP();
16247
16248 IEM_MC_END();
16249 return VINF_SUCCESS;
16250}
16251
16252
/** Opcode 0xdc 11/0. FADD ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    /* Common stN,st0 worker with the 80-bit add assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
16259
16260
/** Opcode 0xdc 11/1. FMUL ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    /* Common stN,st0 worker with the 80-bit multiply assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
16267
16268
/** Opcode 0xdc 11/4. FSUBR ST(i),ST(0) - reversed subtract: ST(i) = ST(0) - ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
16275
16276
/** Opcode 0xdc 11/5. FSUB ST(i),ST(0): ST(i) = ST(i) - ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
16283
16284
/** Opcode 0xdc 11/6. FDIVR ST(i),ST(0) - reversed divide: ST(i) = ST(0) / ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
16291
16292
/** Opcode 0xdc 11/7. FDIV ST(i),ST(0): ST(i) = ST(i) / ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
16299
16300
16301/**
16302 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
16303 * memory operand, and storing the result in ST0.
16304 *
16305 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16306 */
16307FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
16308{
16309 IEM_MC_BEGIN(3, 3);
16310 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16311 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16312 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
16313 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16314 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
16315 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
16316
16317 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16319 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16320 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16321
16322 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16323 IEM_MC_PREPARE_FPU_USAGE();
16324 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
16325 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
16326 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16327 IEM_MC_ELSE()
16328 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16329 IEM_MC_ENDIF();
16330 IEM_MC_ADVANCE_RIP();
16331
16332 IEM_MC_END();
16333 return VINF_SUCCESS;
16334}
16335
16336
/** Opcode 0xdc !11/0. FADD ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
16343
16344
/** Opcode 0xdc !11/1. FMUL ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
16351
16352
/** Opcode 0xdc !11/2. FCOM ST(0),m64real - compare, only FSW is updated. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* No destination register: result is FSW only, hence UINT8_MAX on underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16385
16386
/** Opcode 0xdc !11/3. FCOMP ST(0),m64real - compare then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same as FCOM m64r but the THEN_POP variants pop the stack afterwards. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16419
16420
/** Opcode 0xdc !11/4. FSUB ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
16427
16428
/** Opcode 0xdc !11/5. FSUBR ST(0),m64real - reversed subtract. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
16435
16436
/** Opcode 0xdc !11/6. FDIV ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
16443
16444
/** Opcode 0xdc !11/7. FDIVR ST(0),m64real - reversed divide. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
16451
16452
/** Opcode 0xdc. Escape group F4 decoder: dispatches on ModR/M mod and reg fields. */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (low 3 opcode bits + ModR/M) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form (mod == 3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: 64-bit real operands. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16489
16490
/** Opcode 0xdd !11/0. FLD m64real - push a 64-bit real onto the FPU stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST(7) (the register below the current top) to be free;
       otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16522
16523
/** Opcode 0xdd !11/1. FISTTP m64int - store ST(0) as a truncated 64-bit integer and pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable so the commit can be made conditional on FSW. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow with IM masked stores the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16558
16559
/** Opcode 0xdd !11/2. FST m64real - store ST(0) as a 64-bit real, no pop. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow with IM masked stores negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16594
16595
16596
16597
/** Opcode 0xdd !11/3. FSTP m64real - store ST(0) as a 64-bit real and pop. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Identical to FST m64r except the THEN_POP variants pop the stack. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16632
16633
/** Opcode 0xdd !11/4. FRSTOR - restore the full FPU state from memory. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    /* Deferred to the C implementation; the image size depends on the
       effective operand size (94 bytes for 16-bit, 108 for 32-bit). */
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16651
16652
/** Opcode 0xdd !11/6. FNSAVE - save the full FPU state to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    /* Deferred to the C implementation; image size depends on operand size,
       see FRSTOR. */
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
16671
/** Opcode 0xdd !11/7. FNSTSW m16 - store the FPU status word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note: no IEM_MC_MAYBE_RAISE_FPU_XCPT - the no-wait form ignores pending
       unmasked FPU exceptions. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
16696
16697
/** Opcode 0xdd 11/0. FFREE ST(i) - mark ST(i) as empty in the tag word. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    /* Only FOP/FIP/FDP bookkeeping; no result is stored. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16719
16720
/** Opcode 0xdd 11/2. FST ST(i) - copy ST(0) into ST(i), no pop. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Wrap ST(0) in a result with a zero FSW delta and store it into ST(i). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16745
16746
/** Opcode 0xdd 11/4. FUCOM ST(0),ST(i) - unordered compare, FSW only, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
16753
16754
/** Opcode 0xdd 11/5. FUCOMP ST(0),ST(i) - unordered compare, FSW only, then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
16761
16762
/** Opcode 0xdd. Escape group F5 decoder: dispatches on ModR/M mod and reg fields. */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (low 3 opcode bits + ModR/M) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form (mod == 3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: 64-bit real/integer and state save/restore. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16799
16800
/** Opcode 0xde 11/0. FADDP ST(i),ST(0) - add and pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
16807
16808
/** Opcode 0xde 11/1. FMULP ST(i),ST(0) - multiply and pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
16815
16816
/** Opcode 0xde 0xd9. FCOMPP - compare ST(0) with ST(1) and pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
16823
16824
/** Opcode 0xde 11/4. FSUBRP ST(i),ST(0) - reversed subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
16831
16832
/** Opcode 0xde 11/5. FSUBP ST(i),ST(0) - subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
16839
16840
/** Opcode 0xde 11/6. FDIVRP ST(i),ST(0) - reversed divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
16847
16848
/** Opcode 0xde 11/7. FDIVP ST(i),ST(0) - divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
16855
16856
16857/**
16858 * Common worker for FPU instructions working on ST0 and an m16i, and storing
16859 * the result in ST0.
16860 *
16861 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16862 */
16863FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
16864{
16865 IEM_MC_BEGIN(3, 3);
16866 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16867 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16868 IEM_MC_LOCAL(int16_t, i16Val2);
16869 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16870 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16871 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16872
16873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16875
16876 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16877 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16878 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16879
16880 IEM_MC_PREPARE_FPU_USAGE();
16881 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16882 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
16883 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
16884 IEM_MC_ELSE()
16885 IEM_MC_FPU_STACK_UNDERFLOW(0);
16886 IEM_MC_ENDIF();
16887 IEM_MC_ADVANCE_RIP();
16888
16889 IEM_MC_END();
16890 return VINF_SUCCESS;
16891}
16892
16893
/** Opcode 0xde !11/0. FIADD m16int - add a 16-bit integer to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
16900
16901
/** Opcode 0xde !11/1. FIMUL m16int - multiply ST(0) by a 16-bit integer. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
16908
16909
/** Opcode 0xde !11/2. FICOM ST(0),m16int - compare with a 16-bit integer, FSW only. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2,  i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Only FSW is produced; UINT8_MAX marks "no destination register" on underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16942
16943
/** Opcode 0xde !11/3. FICOMP ST(0),m16int - compare with a 16-bit integer, then pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2,  i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same as FICOM m16i but the THEN_POP variants pop the stack afterwards. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16976
16977
/** Opcode 0xde !11/4. FISUB m16int - subtract a 16-bit integer from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
16984
16985
/** Opcode 0xde !11/5. FISUBR m16int - reversed subtract: ST(0) = m16int - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
16992
16993
/** Opcode 0xde !11/6. FIDIV m16int - divide ST(0) by a 16-bit integer. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
17000
17001
/** Opcode 0xde !11/7. FIDIVR m16int - reversed divide: ST(0) = m16int / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
17008
17009
/** Opcode 0xde. Escape group F6 decoder: dispatches on ModR/M mod and reg fields. */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (low 3 opcode bits + ModR/M) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form (mod == 3); /3 is only valid as FCOMPP (0xde 0xd9). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: 16-bit integer operands. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17048
17049
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Free ST(i) and then pop by incrementing the top-of-stack pointer. */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17071
17072
/** Opcode 0xdf 0xe0. FNSTSW AX - store the FPU status word in AX (no-wait form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    /* No IEM_MC_MAYBE_RAISE_FPU_XCPT - no-wait form ignores pending exceptions. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17089
17090
17091/** Opcode 0xdf 11/5. */
17092FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
17093{
17094 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
17095 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
17096}
17097
17098
/** Opcode 0xdf 11/6.
 * FCOMIP - ordered compare ST(0) with ST(i), set EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    /* Deferred to the shared fcomi/fucomi C implementation; fPop=true pops ST(0). */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
17105
17106
/** Opcode 0xdf !11/0.
 * FILD m16int - load a 16-bit signed integer from memory, convert to
 * 80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the register that becomes the new ST(0) after the push;
       if it is not empty we have a stack overflow instead of a load. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17138
17139
/** Opcode 0xdf !11/1.
 * FISTTP m16int - store ST(0) to memory as a 16-bit integer using
 * truncation (round toward zero regardless of RC), then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: stack underflow.  With #IA masked the integer
           indefinite value (INT16_MIN) is stored; otherwise nothing is. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17174
17175
/** Opcode 0xdf !11/2.
 * FIST m16int - store ST(0) to memory as a 16-bit integer (rounded per
 * FCW.RC); does not pop. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        /* Unlike fistp/fisttp: FSW updated without popping the stack. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: stack underflow; store integer indefinite if #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17210
17211
/** Opcode 0xdf !11/3.
 * FISTP m16int - store ST(0) to memory as a 16-bit integer (rounded per
 * FCW.RC), then pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: stack underflow; store integer indefinite if #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17246
17247
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load 80-bit packed BCD integer; not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
17250
17251
/** Opcode 0xdf !11/5.
 * FILD m64int - load a 64-bit signed integer from memory, convert to
 * 80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) becomes the new ST(0) after the push; it must be empty,
       otherwise this is a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17283
17284
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store ST(0) as 80-bit packed BCD and pop; not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
17287
17288
/** Opcode 0xdf !11/7.
 * FISTP m64int - store ST(0) to memory as a 64-bit integer (rounded per
 * FCW.RC), then pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: stack underflow; store integer indefinite if #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17323
17324
/** Opcode 0xdf - x87 escape group 7.
 * Register forms (mod=3) dispatch on the REG field; memory forms handle
 * 16/64-bit integer loads/stores and packed BCD. */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* only 0xdf 0xe0 is valid in this group */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i,   bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i,  bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i,   bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17362
17363
/** Opcode 0xe0.
 * LOOPNE/LOOPNZ Jb - decrement xCX and jump if xCX != 0 and ZF == 0.
 * The counter width follows the effective address size. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Address size, not operand size, selects CX/ECX/RCX. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17410
17411
/** Opcode 0xe1.
 * LOOPE/LOOPZ Jb - decrement xCX and jump if xCX != 0 and ZF == 1.
 * The counter width follows the effective address size. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Address size, not operand size, selects CX/ECX/RCX. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17458
17459
/** Opcode 0xe2.
 * LOOP Jb - decrement xCX and jump if xCX != 0.  The counter width follows
 * the effective address size. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* Shortcut for "loop $" (target == this instruction, i.e. the
               displacement equals minus the instruction length): such a loop
               only terminates when xCX reaches zero, so clear the counter and
               fall through instead of iterating in the emulator. */
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17533
17534
/** Opcode 0xe3.
 * JCXZ/JECXZ/JRCXZ Jb - jump if xCX is zero; the counter width follows the
 * effective address size (the mnemonic covers all three forms). */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* Condition inverted vs LOOP: jump when the counter IS zero. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17578
17579
/** Opcode 0xe4.
 * IN AL,Ib - read one byte from the I/O port given by the immediate into AL. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}
17588
17589
/** Opcode 0xe5.
 * IN eAX,Ib - read a word/dword from the I/O port given by the immediate;
 * access width (2 or 4 bytes) follows the effective operand size. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17598
17599
/** Opcode 0xe6.
 * OUT Ib,AL - write AL to the I/O port given by the immediate. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
17608
17609
/** Opcode 0xe7.
 * OUT Ib,eAX - write AX/EAX to the I/O port given by the immediate;
 * access width (2 or 4 bytes) follows the effective operand size. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17618
17619
/** Opcode 0xe8.
 * CALL Jv - near relative call.  In 64-bit mode the operand size defaults to
 * 64 bits but the displacement is a sign-extended 32-bit immediate. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* 32-bit immediate, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17648
17649
/** Opcode 0xe9.
 * JMP Jv - near relative jump.  The 64-bit case shares the 32-bit path since
 * both take a sign-extended 32-bit displacement. */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:   /* deliberate fall-through: Jz is 32-bit in long mode too */
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17679
17680
/** Opcode 0xea.
 * JMP Ap - direct far jump via an immediate sel:offset pointer.
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT). */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation.
       Offset comes first in the instruction stream (16 or 32 bits depending on
       operand size), followed by the 16-bit selector. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
17697
17698
/** Opcode 0xeb.
 * JMP Jb - short relative jump (8-bit signed displacement). */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
17712
17713
/** Opcode 0xec.
 * IN AL,DX - read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
17721
17722
/** Opcode 0xed.
 * IN eAX,DX - read a word/dword from the I/O port in DX; access width
 * follows the effective operand size.
 * NOTE(review): function name lacks the "in_" prefix used by its siblings;
 * kept as-is since the opcode table references it. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17730
17731
/** Opcode 0xee.
 * OUT DX,AL - write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
17739
17740
/** Opcode 0xef.
 * OUT DX,eAX - write AX/EAX to the I/O port in DX; access width follows
 * the effective operand size. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
17748
17749
/** Opcode 0xf0.
 * LOCK prefix - record the prefix and continue decoding the next opcode
 * byte through the one-byte opcode table. */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
17759
17760
/** Opcode 0xf1.
 * INT1/ICEBP - raise a debug exception (#DB) without the BP-instruction
 * semantics (fIsBpInstr=false). */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
17769
17770
/** Opcode 0xf2.
 * REPNE/REPNZ prefix - record the prefix and continue decoding the next
 * opcode byte through the one-byte opcode table. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
17786
17787
/** Opcode 0xf3.
 * REPE/REPZ prefix - record the prefix and continue decoding the next
 * opcode byte through the one-byte opcode table. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
17803
17804
17805/** Opcode 0xf4. */
17806FNIEMOP_DEF(iemOp_hlt)
17807{
17808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17809 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
17810}
17811
17812
/** Opcode 0xf5.
 * CMC - complement the carry flag. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17824
17825
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Register operands are modified in place; memory operands are mapped
 * read-write and support the LOCK prefix via the pfnLockedU8 worker.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK prefix selects the atomic worker instead of faulting. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
17869
17870
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are delegated to iemOpCommonUnaryGReg; memory operands
 * are handled here per operand size, mapped read-write, with LOCK-prefix
 * support via the pfnLockedUxx workers.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17949
17950
/** Opcode 0xf6 /0.
 * TEST Eb,Ib - AND the byte operand with the immediate, setting flags only;
 * the destination is never written (memory is mapped read-only). */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The '1' tells the effective-address calc a 1-byte immediate follows. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Read-only mapping - TEST never writes its destination. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
17997
17998
17999/** Opcode 0xf7 /0. */
18000FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
18001{
18002 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
18003 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
18004
18005 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18006 {
18007 /* register access */
18008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18009 switch (pVCpu->iem.s.enmEffOpSize)
18010 {
18011 case IEMMODE_16BIT:
18012 {
18013 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
18014 IEM_MC_BEGIN(3, 0);
18015 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
18016 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
18017 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18018 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18019 IEM_MC_REF_EFLAGS(pEFlags);
18020 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
18021 IEM_MC_ADVANCE_RIP();
18022 IEM_MC_END();
18023 return VINF_SUCCESS;
18024 }
18025
18026 case IEMMODE_32BIT:
18027 {
18028 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
18029 IEM_MC_BEGIN(3, 0);
18030 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18031 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
18032 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18033 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18034 IEM_MC_REF_EFLAGS(pEFlags);
18035 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
18036 /* No clearing the high dword here - test doesn't write back the result. */
18037 IEM_MC_ADVANCE_RIP();
18038 IEM_MC_END();
18039 return VINF_SUCCESS;
18040 }
18041
18042 case IEMMODE_64BIT:
18043 {
18044 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
18045 IEM_MC_BEGIN(3, 0);
18046 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18047 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
18048 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18049 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18050 IEM_MC_REF_EFLAGS(pEFlags);
18051 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
18052 IEM_MC_ADVANCE_RIP();
18053 IEM_MC_END();
18054 return VINF_SUCCESS;
18055 }
18056
18057 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18058 }
18059 }
18060 else
18061 {
18062 /* memory access. */
18063 switch (pVCpu->iem.s.enmEffOpSize)
18064 {
18065 case IEMMODE_16BIT:
18066 {
18067 IEM_MC_BEGIN(3, 2);
18068 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
18069 IEM_MC_ARG(uint16_t, u16Src, 1);
18070 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18071 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18072
18073 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
18074 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
18075 IEM_MC_ASSIGN(u16Src, u16Imm);
18076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18077 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18078 IEM_MC_FETCH_EFLAGS(EFlags);
18079 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
18080
18081 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
18082 IEM_MC_COMMIT_EFLAGS(EFlags);
18083 IEM_MC_ADVANCE_RIP();
18084 IEM_MC_END();
18085 return VINF_SUCCESS;
18086 }
18087
18088 case IEMMODE_32BIT:
18089 {
18090 IEM_MC_BEGIN(3, 2);
18091 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18092 IEM_MC_ARG(uint32_t, u32Src, 1);
18093 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18094 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18095
18096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18097 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
18098 IEM_MC_ASSIGN(u32Src, u32Imm);
18099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18100 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18101 IEM_MC_FETCH_EFLAGS(EFlags);
18102 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
18103
18104 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
18105 IEM_MC_COMMIT_EFLAGS(EFlags);
18106 IEM_MC_ADVANCE_RIP();
18107 IEM_MC_END();
18108 return VINF_SUCCESS;
18109 }
18110
18111 case IEMMODE_64BIT:
18112 {
18113 IEM_MC_BEGIN(3, 2);
18114 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18115 IEM_MC_ARG(uint64_t, u64Src, 1);
18116 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18117 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18118
18119 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18120 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
18121 IEM_MC_ASSIGN(u64Src, u64Imm);
18122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18123 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18124 IEM_MC_FETCH_EFLAGS(EFlags);
18125 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
18126
18127 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
18128 IEM_MC_COMMIT_EFLAGS(EFlags);
18129 IEM_MC_ADVANCE_RIP();
18130 IEM_MC_END();
18131 return VINF_SUCCESS;
18132 }
18133
18134 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18135 }
18136 }
18137}
18138
18139
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the byte-sized MUL, IMUL, DIV and IDIV forms.  The 8-bit
 * multiply/divide instructions implicitly operate on AX (AL in, AL/AH out),
 * which is why a single 16-bit reference to AX is handed to the assembly
 * helper rather than separate AL/AH references.
 *
 * @param   bRm     The ModR/M byte (mod/rm select register vs. memory source).
 * @param   pfnU8   The 8-bit multiply/divide assembly helper; returns zero on
 *                  success and non-zero when a divide error (\#DE) must be
 *                  raised.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero helper status => divide-by-zero or quotient overflow:
           raise #DE instead of advancing RIP. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
18191
18192
18193/** Opcode 0xf7 /4, /5, /6 and /7. */
18194FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
18195{
18196 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18197
18198 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18199 {
18200 /* register access */
18201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18202 switch (pVCpu->iem.s.enmEffOpSize)
18203 {
18204 case IEMMODE_16BIT:
18205 {
18206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18207 IEM_MC_BEGIN(4, 1);
18208 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18209 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18210 IEM_MC_ARG(uint16_t, u16Value, 2);
18211 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18212 IEM_MC_LOCAL(int32_t, rc);
18213
18214 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18215 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18216 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18217 IEM_MC_REF_EFLAGS(pEFlags);
18218 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18219 IEM_MC_IF_LOCAL_IS_Z(rc) {
18220 IEM_MC_ADVANCE_RIP();
18221 } IEM_MC_ELSE() {
18222 IEM_MC_RAISE_DIVIDE_ERROR();
18223 } IEM_MC_ENDIF();
18224
18225 IEM_MC_END();
18226 return VINF_SUCCESS;
18227 }
18228
18229 case IEMMODE_32BIT:
18230 {
18231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18232 IEM_MC_BEGIN(4, 1);
18233 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18234 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18235 IEM_MC_ARG(uint32_t, u32Value, 2);
18236 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18237 IEM_MC_LOCAL(int32_t, rc);
18238
18239 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18240 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18241 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18242 IEM_MC_REF_EFLAGS(pEFlags);
18243 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18244 IEM_MC_IF_LOCAL_IS_Z(rc) {
18245 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18246 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18247 IEM_MC_ADVANCE_RIP();
18248 } IEM_MC_ELSE() {
18249 IEM_MC_RAISE_DIVIDE_ERROR();
18250 } IEM_MC_ENDIF();
18251
18252 IEM_MC_END();
18253 return VINF_SUCCESS;
18254 }
18255
18256 case IEMMODE_64BIT:
18257 {
18258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18259 IEM_MC_BEGIN(4, 1);
18260 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18261 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18262 IEM_MC_ARG(uint64_t, u64Value, 2);
18263 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18264 IEM_MC_LOCAL(int32_t, rc);
18265
18266 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18267 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18268 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18269 IEM_MC_REF_EFLAGS(pEFlags);
18270 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18271 IEM_MC_IF_LOCAL_IS_Z(rc) {
18272 IEM_MC_ADVANCE_RIP();
18273 } IEM_MC_ELSE() {
18274 IEM_MC_RAISE_DIVIDE_ERROR();
18275 } IEM_MC_ENDIF();
18276
18277 IEM_MC_END();
18278 return VINF_SUCCESS;
18279 }
18280
18281 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18282 }
18283 }
18284 else
18285 {
18286 /* memory access. */
18287 switch (pVCpu->iem.s.enmEffOpSize)
18288 {
18289 case IEMMODE_16BIT:
18290 {
18291 IEM_MC_BEGIN(4, 2);
18292 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18293 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18294 IEM_MC_ARG(uint16_t, u16Value, 2);
18295 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18297 IEM_MC_LOCAL(int32_t, rc);
18298
18299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18301 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18302 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18303 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18304 IEM_MC_REF_EFLAGS(pEFlags);
18305 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18306 IEM_MC_IF_LOCAL_IS_Z(rc) {
18307 IEM_MC_ADVANCE_RIP();
18308 } IEM_MC_ELSE() {
18309 IEM_MC_RAISE_DIVIDE_ERROR();
18310 } IEM_MC_ENDIF();
18311
18312 IEM_MC_END();
18313 return VINF_SUCCESS;
18314 }
18315
18316 case IEMMODE_32BIT:
18317 {
18318 IEM_MC_BEGIN(4, 2);
18319 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18320 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18321 IEM_MC_ARG(uint32_t, u32Value, 2);
18322 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18323 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18324 IEM_MC_LOCAL(int32_t, rc);
18325
18326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18328 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18329 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18330 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18331 IEM_MC_REF_EFLAGS(pEFlags);
18332 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18333 IEM_MC_IF_LOCAL_IS_Z(rc) {
18334 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18335 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18336 IEM_MC_ADVANCE_RIP();
18337 } IEM_MC_ELSE() {
18338 IEM_MC_RAISE_DIVIDE_ERROR();
18339 } IEM_MC_ENDIF();
18340
18341 IEM_MC_END();
18342 return VINF_SUCCESS;
18343 }
18344
18345 case IEMMODE_64BIT:
18346 {
18347 IEM_MC_BEGIN(4, 2);
18348 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18349 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18350 IEM_MC_ARG(uint64_t, u64Value, 2);
18351 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18352 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18353 IEM_MC_LOCAL(int32_t, rc);
18354
18355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18357 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18358 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18359 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18360 IEM_MC_REF_EFLAGS(pEFlags);
18361 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18362 IEM_MC_IF_LOCAL_IS_Z(rc) {
18363 IEM_MC_ADVANCE_RIP();
18364 } IEM_MC_ELSE() {
18365 IEM_MC_RAISE_DIVIDE_ERROR();
18366 } IEM_MC_ENDIF();
18367
18368 IEM_MC_END();
18369 return VINF_SUCCESS;
18370 }
18371
18372 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18373 }
18374 }
18375}
18376
/** Opcode 0xf6.
 * Group 3 with a byte operand; the ModR/M reg field selects the instruction. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
18413
18414
/** Opcode 0xf7.
 * Group 3 with a word/dword/qword operand; the ModR/M reg field selects the
 * instruction. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
18451
18452
/** Opcode 0xf8.
 * CLC - clear the carry flag; no other flags are affected. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18464
18465
/** Opcode 0xf9.
 * STC - set the carry flag; no other flags are affected. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18477
18478
/** Opcode 0xfa.
 * CLI - privilege/VME checks live in the C implementation, hence the defer. */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
18486
18487
/** Opcode 0xfb.
 * STI - privilege checks and the interrupt-shadow handling live in the C
 * implementation, hence the defer. */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
18494
18495
/** Opcode 0xfc.
 * CLD - clear the direction flag so string ops auto-increment. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18507
18508
/** Opcode 0xfd.
 * STD - set the direction flag so string ops auto-decrement. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18520
18521
/** Opcode 0xfe.
 * Group 4: only INC Eb (/0) and DEC Eb (/1) are defined; /2../7 raise \#UD. */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
18539
18540
/**
 * Opcode 0xff /2.  CALL near, absolute indirect - the target comes from a
 * register or a memory operand.  The operand size defaults to 64-bit in
 * long mode; the actual push + RIP update is done by the iemCImpl_call_NN
 * C implementation.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
18625
18626typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
18627
18628FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
18629{
18630 /* Registers? How?? */
18631 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
18632 { /* likely */ }
18633 else
18634 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
18635
18636 /* Far pointer loaded from memory. */
18637 switch (pVCpu->iem.s.enmEffOpSize)
18638 {
18639 case IEMMODE_16BIT:
18640 IEM_MC_BEGIN(3, 1);
18641 IEM_MC_ARG(uint16_t, u16Sel, 0);
18642 IEM_MC_ARG(uint16_t, offSeg, 1);
18643 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
18644 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18647 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18648 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
18649 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18650 IEM_MC_END();
18651 return VINF_SUCCESS;
18652
18653 case IEMMODE_64BIT:
18654 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
18655 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
18656 * and call far qword [rsp] encodings. */
18657 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
18658 {
18659 IEM_MC_BEGIN(3, 1);
18660 IEM_MC_ARG(uint16_t, u16Sel, 0);
18661 IEM_MC_ARG(uint64_t, offSeg, 1);
18662 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
18663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18666 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18667 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
18668 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18669 IEM_MC_END();
18670 return VINF_SUCCESS;
18671 }
18672 /* AMD falls thru. */
18673 /* fall thru */
18674
18675 case IEMMODE_32BIT:
18676 IEM_MC_BEGIN(3, 1);
18677 IEM_MC_ARG(uint16_t, u16Sel, 0);
18678 IEM_MC_ARG(uint32_t, offSeg, 1);
18679 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
18680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18681 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18683 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18684 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
18685 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18686 IEM_MC_END();
18687 return VINF_SUCCESS;
18688
18689 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18690 }
18691}
18692
18693
/**
 * Opcode 0xff /3.  CALL far, absolute indirect - thin wrapper delegating to
 * the common far-pointer worker with the far-call C implementation.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
18703
18704
/**
 * Opcode 0xff /4.  JMP near, absolute indirect - the target comes from a
 * register or a memory operand.  The operand size defaults to 64-bit in
 * long mode.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
18789
18790
/**
 * Opcode 0xff /5.  JMP far, absolute indirect - thin wrapper delegating to
 * the common far-pointer worker with the far-jump C implementation.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
18800
18801
/**
 * Opcode 0xff /6.  PUSH Ev - register forms reuse the common push-GPR
 * worker; memory forms load the value and push it here.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
18857
18858
/** Opcode 0xff.
 * Group 5 dispatcher; the ModR/M reg field selects the instruction (/7 is
 * undefined and raises \#UD). */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* All eight reg values are handled above; keep the compiler happy. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
18887
18888
18889
/**
 * The one-byte opcode decoder table - one handler per opcode value 0x00..0xff.
 * Forward declared at the top of this file; indexed directly with the opcode
 * byte by the instruction decoder.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
18957
18958
18959/** @} */
18960
18961#ifdef _MSC_VER
18962# pragma warning(pop)
18963#endif
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette