1/* $Id: IEMAllInstructions.cpp.h 65748 2017-02-13 08:13:06Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
11 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24#ifdef _MSC_VER
25# pragma warning(push)
26# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
27#endif
28
29
30/**
31 * Common worker for instructions like ADD, AND, OR, ++ with a byte
32 * memory/register as the destination.
33 *
34 * @param pImpl Pointer to the instruction implementation (assembly).
35 */
36FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
37{
38 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
39
40 /*
41 * If rm is denoting a register, no more instruction bytes.
42 */
43 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
44 {
45 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
46
47 IEM_MC_BEGIN(3, 0);
48 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
49 IEM_MC_ARG(uint8_t, u8Src, 1);
50 IEM_MC_ARG(uint32_t *, pEFlags, 2);
51
52 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
53 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
54 IEM_MC_REF_EFLAGS(pEFlags);
55 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
56
57 IEM_MC_ADVANCE_RIP();
58 IEM_MC_END();
59 }
60 else
61 {
62 /*
63 * We're accessing memory.
64 * Note! We're putting the eflags on the stack here so we can commit
65 * them after the memory write; a faulting write leaves them unchanged.
66 */
67 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
68 IEM_MC_BEGIN(3, 2);
69 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
70 IEM_MC_ARG(uint8_t, u8Src, 1);
71 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
72 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
73
74 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
75 if (!pImpl->pfnLockedU8)
76 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
77 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
78 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
79 IEM_MC_FETCH_EFLAGS(EFlags);
80 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
81 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
82 else
83 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
84
85 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
86 IEM_MC_COMMIT_EFLAGS(EFlags);
87 IEM_MC_ADVANCE_RIP();
88 IEM_MC_END();
89 }
90 return VINF_SUCCESS;
91}
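/*
 * Aside: the mod/reg/rm arithmetic above is easier to follow in isolation.
 * A minimal standalone sketch (plain C, architectural ModR/M field
 * positions; illustration only, not part of the build):
 */
#if 0 /* illustration only */
# include <stdbool.h>
# include <stdint.h>

/* mod (bits 7:6) == 3 selects the register form, i.e. the same test as
   (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT). */
static bool IsRegisterForm(uint8_t bRm)
{
    return ((bRm >> 6) & 3) == 3;
}

/* reg (bits 5:3) and rm (bits 2:0) pick the operands; the REX.R/REX.B
   values are assumed pre-shifted to 0 or 8, as in pVCpu->iem.s.uRexReg. */
static uint8_t DecodeRegField(uint8_t bRm, uint8_t uRexReg) { return (uint8_t)(((bRm >> 3) & 7) | uRexReg); }
static uint8_t DecodeRmField (uint8_t bRm, uint8_t uRexB)   { return (uint8_t)((bRm & 7) | uRexB); }
#endif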
92
93
94/**
95 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
96 * memory/register as the destination.
97 *
98 * @param pImpl Pointer to the instruction implementation (assembly).
99 */
100FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
101{
102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
103
104 /*
105 * If rm is denoting a register, no more instruction bytes.
106 */
107 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
108 {
109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
110
111 switch (pVCpu->iem.s.enmEffOpSize)
112 {
113 case IEMMODE_16BIT:
114 IEM_MC_BEGIN(3, 0);
115 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
116 IEM_MC_ARG(uint16_t, u16Src, 1);
117 IEM_MC_ARG(uint32_t *, pEFlags, 2);
118
119 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
120 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
121 IEM_MC_REF_EFLAGS(pEFlags);
122 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
123
124 IEM_MC_ADVANCE_RIP();
125 IEM_MC_END();
126 break;
127
128 case IEMMODE_32BIT:
129 IEM_MC_BEGIN(3, 0);
130 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
131 IEM_MC_ARG(uint32_t, u32Src, 1);
132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
133
134 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
135 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
136 IEM_MC_REF_EFLAGS(pEFlags);
137 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
138
139 if (pImpl != &g_iemAImpl_test)
140 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
141 IEM_MC_ADVANCE_RIP();
142 IEM_MC_END();
143 break;
144
145 case IEMMODE_64BIT:
146 IEM_MC_BEGIN(3, 0);
147 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
148 IEM_MC_ARG(uint64_t, u64Src, 1);
149 IEM_MC_ARG(uint32_t *, pEFlags, 2);
150
151 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
152 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
153 IEM_MC_REF_EFLAGS(pEFlags);
154 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
155
156 IEM_MC_ADVANCE_RIP();
157 IEM_MC_END();
158 break;
159 }
160 }
161 else
162 {
163 /*
164 * We're accessing memory.
165 * Note! We're putting the eflags on the stack here so we can commit
166 * them after the memory write; a faulting write leaves them unchanged.
167 */
168 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
169 switch (pVCpu->iem.s.enmEffOpSize)
170 {
171 case IEMMODE_16BIT:
172 IEM_MC_BEGIN(3, 2);
173 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
174 IEM_MC_ARG(uint16_t, u16Src, 1);
175 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
177
178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
179 if (!pImpl->pfnLockedU16)
180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
181 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
182 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
183 IEM_MC_FETCH_EFLAGS(EFlags);
184 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
185 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
186 else
187 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
188
189 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
190 IEM_MC_COMMIT_EFLAGS(EFlags);
191 IEM_MC_ADVANCE_RIP();
192 IEM_MC_END();
193 break;
194
195 case IEMMODE_32BIT:
196 IEM_MC_BEGIN(3, 2);
197 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
198 IEM_MC_ARG(uint32_t, u32Src, 1);
199 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
201
202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
203 if (!pImpl->pfnLockedU32)
204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
205 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
206 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
207 IEM_MC_FETCH_EFLAGS(EFlags);
208 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
209 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
210 else
211 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
212
213 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
214 IEM_MC_COMMIT_EFLAGS(EFlags);
215 IEM_MC_ADVANCE_RIP();
216 IEM_MC_END();
217 break;
218
219 case IEMMODE_64BIT:
220 IEM_MC_BEGIN(3, 2);
221 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
222 IEM_MC_ARG(uint64_t, u64Src, 1);
223 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
225
226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
227 if (!pImpl->pfnLockedU64)
228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
229 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
230 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
231 IEM_MC_FETCH_EFLAGS(EFlags);
232 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
233 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
234 else
235 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
236
237 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
238 IEM_MC_COMMIT_EFLAGS(EFlags);
239 IEM_MC_ADVANCE_RIP();
240 IEM_MC_END();
241 break;
242 }
243 }
244 return VINF_SUCCESS;
245}
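/*
 * Aside: the pfnNormalUxx/pfnLockedUxx split above exists because a
 * LOCK-prefixed read-modify-write must update the mapped guest operand
 * atomically.  A hedged sketch of one way to express a locked byte ADD
 * in C11 atomics (hypothetical helper, not the actual assembly worker):
 */
#if 0 /* illustration only */
# include <stdatomic.h>
# include <stdint.h>

/* Atomically adds u8Src to *pu8Dst and returns the old value so the
   caller can compute EFLAGS from (old, src, old + src) exactly once. */
static uint8_t LockedAddU8(_Atomic uint8_t *pu8Dst, uint8_t u8Src)
{
    uint8_t uOld = atomic_load(pu8Dst);
    while (!atomic_compare_exchange_weak(pu8Dst, &uOld, (uint8_t)(uOld + u8Src)))
    { /* uOld was reloaded by the failed CAS; retry. */ }
    return uOld;
}
#endif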
246
247
248/**
249 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
250 * the destination.
251 *
252 * @param pImpl Pointer to the instruction implementation (assembly).
253 */
254FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
255{
256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
257
258 /*
259 * If rm is denoting a register, no more instruction bytes.
260 */
261 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
262 {
263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
264 IEM_MC_BEGIN(3, 0);
265 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
266 IEM_MC_ARG(uint8_t, u8Src, 1);
267 IEM_MC_ARG(uint32_t *, pEFlags, 2);
268
269 IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
270 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
271 IEM_MC_REF_EFLAGS(pEFlags);
272 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
273
274 IEM_MC_ADVANCE_RIP();
275 IEM_MC_END();
276 }
277 else
278 {
279 /*
280 * We're accessing memory.
281 */
282 IEM_MC_BEGIN(3, 1);
283 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
284 IEM_MC_ARG(uint8_t, u8Src, 1);
285 IEM_MC_ARG(uint32_t *, pEFlags, 2);
286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
287
288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
290 IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
291 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
292 IEM_MC_REF_EFLAGS(pEFlags);
293 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
294
295 IEM_MC_ADVANCE_RIP();
296 IEM_MC_END();
297 }
298 return VINF_SUCCESS;
299}
300
301
302/**
303 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
304 * register as the destination.
305 *
306 * @param pImpl Pointer to the instruction implementation (assembly).
307 */
308FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
309{
310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
311
312 /*
313 * If rm is denoting a register, no more instruction bytes.
314 */
315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
316 {
317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
318 switch (pVCpu->iem.s.enmEffOpSize)
319 {
320 case IEMMODE_16BIT:
321 IEM_MC_BEGIN(3, 0);
322 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
323 IEM_MC_ARG(uint16_t, u16Src, 1);
324 IEM_MC_ARG(uint32_t *, pEFlags, 2);
325
326 IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
327 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
328 IEM_MC_REF_EFLAGS(pEFlags);
329 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
330
331 IEM_MC_ADVANCE_RIP();
332 IEM_MC_END();
333 break;
334
335 case IEMMODE_32BIT:
336 IEM_MC_BEGIN(3, 0);
337 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
338 IEM_MC_ARG(uint32_t, u32Src, 1);
339 IEM_MC_ARG(uint32_t *, pEFlags, 2);
340
341 IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
342 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
343 IEM_MC_REF_EFLAGS(pEFlags);
344 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
345
346 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
347 IEM_MC_ADVANCE_RIP();
348 IEM_MC_END();
349 break;
350
351 case IEMMODE_64BIT:
352 IEM_MC_BEGIN(3, 0);
353 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
354 IEM_MC_ARG(uint64_t, u64Src, 1);
355 IEM_MC_ARG(uint32_t *, pEFlags, 2);
356
357 IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
358 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
359 IEM_MC_REF_EFLAGS(pEFlags);
360 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
361
362 IEM_MC_ADVANCE_RIP();
363 IEM_MC_END();
364 break;
365 }
366 }
367 else
368 {
369 /*
370 * We're accessing memory.
371 */
372 switch (pVCpu->iem.s.enmEffOpSize)
373 {
374 case IEMMODE_16BIT:
375 IEM_MC_BEGIN(3, 1);
376 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
377 IEM_MC_ARG(uint16_t, u16Src, 1);
378 IEM_MC_ARG(uint32_t *, pEFlags, 2);
379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
380
381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
383 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
384 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
385 IEM_MC_REF_EFLAGS(pEFlags);
386 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
387
388 IEM_MC_ADVANCE_RIP();
389 IEM_MC_END();
390 break;
391
392 case IEMMODE_32BIT:
393 IEM_MC_BEGIN(3, 1);
394 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
395 IEM_MC_ARG(uint32_t, u32Src, 1);
396 IEM_MC_ARG(uint32_t *, pEFlags, 2);
397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
398
399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
401 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
402 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
403 IEM_MC_REF_EFLAGS(pEFlags);
404 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
405
406 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
407 IEM_MC_ADVANCE_RIP();
408 IEM_MC_END();
409 break;
410
411 case IEMMODE_64BIT:
412 IEM_MC_BEGIN(3, 1);
413 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
414 IEM_MC_ARG(uint64_t, u64Src, 1);
415 IEM_MC_ARG(uint32_t *, pEFlags, 2);
416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
417
418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
420 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
421 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
422 IEM_MC_REF_EFLAGS(pEFlags);
423 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
424
425 IEM_MC_ADVANCE_RIP();
426 IEM_MC_END();
427 break;
428 }
429 }
430 return VINF_SUCCESS;
431}
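/*
 * Aside: IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF above implements the 64-bit
 * mode rule that a 32-bit GPR write zero-extends into bits 63:32 (TEST is
 * exempt since it never writes its destination).  In plain C:
 */
#if 0 /* illustration only */
# include <stdint.h>

/* Stores a 32-bit result into a 64-bit register slot, x86-64 style; a
   16-bit or 8-bit write would instead merge into the low bits. */
static void StoreGReg32(uint64_t *pReg, uint32_t uResult)
{
    *pReg = uResult; /* implicit zero extension of bits 63:32 */
}
#endif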
432
433
434/**
435 * Common worker for instructions like ADD, AND, OR, ++ that work on AL
436 * with a byte immediate.
437 *
438 * @param pImpl Pointer to the instruction implementation (assembly).
439 */
440FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
441{
442 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
444
445 IEM_MC_BEGIN(3, 0);
446 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
447 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
448 IEM_MC_ARG(uint32_t *, pEFlags, 2);
449
450 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
451 IEM_MC_REF_EFLAGS(pEFlags);
452 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
453
454 IEM_MC_ADVANCE_RIP();
455 IEM_MC_END();
456 return VINF_SUCCESS;
457}
458
459
460/**
461 * Common worker for instructions like ADD, AND, OR, ++ that work on
462 * AX/EAX/RAX with a word/dword immediate.
463 *
464 * @param pImpl Pointer to the instruction implementation (assembly).
465 */
466FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
467{
468 switch (pVCpu->iem.s.enmEffOpSize)
469 {
470 case IEMMODE_16BIT:
471 {
472 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
474
475 IEM_MC_BEGIN(3, 0);
476 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
477 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
478 IEM_MC_ARG(uint32_t *, pEFlags, 2);
479
480 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
481 IEM_MC_REF_EFLAGS(pEFlags);
482 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
483
484 IEM_MC_ADVANCE_RIP();
485 IEM_MC_END();
486 return VINF_SUCCESS;
487 }
488
489 case IEMMODE_32BIT:
490 {
491 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
493
494 IEM_MC_BEGIN(3, 0);
495 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
496 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
497 IEM_MC_ARG(uint32_t *, pEFlags, 2);
498
499 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
500 IEM_MC_REF_EFLAGS(pEFlags);
501 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
502
503 if (pImpl != &g_iemAImpl_test)
504 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
505 IEM_MC_ADVANCE_RIP();
506 IEM_MC_END();
507 return VINF_SUCCESS;
508 }
509
510 case IEMMODE_64BIT:
511 {
512 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
514
515 IEM_MC_BEGIN(3, 0);
516 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
517 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
518 IEM_MC_ARG(uint32_t *, pEFlags, 2);
519
520 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
521 IEM_MC_REF_EFLAGS(pEFlags);
522 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
523
524 IEM_MC_ADVANCE_RIP();
525 IEM_MC_END();
526 return VINF_SUCCESS;
527 }
528
529 IEM_NOT_REACHED_DEFAULT_CASE_RET();
530 }
531}
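/*
 * Aside: in 64-bit mode the Iz immediate stays four bytes and is
 * sign-extended to 64 bits (IEM_OPCODE_GET_NEXT_S32_SX_U64 above); there
 * is no 8-byte ALU immediate form.  The extension in plain C:
 */
#if 0 /* illustration only */
# include <stdint.h>

static uint64_t SignExtendImm32(uint32_t u32Imm)
{
    return (uint64_t)(int64_t)(int32_t)u32Imm; /* 0x80000000 -> 0xffffffff80000000 */
}
#endif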
532
533
534/** Opcodes 0xf1, 0xd6. */
535FNIEMOP_DEF(iemOp_Invalid)
536{
537 IEMOP_MNEMONIC(Invalid, "Invalid");
538 return IEMOP_RAISE_INVALID_OPCODE();
539}
540
541
542/** Invalid with RM byte. */
543FNIEMOPRM_DEF(iemOp_InvalidWithRM)
544{
545 RT_NOREF_PV(bRm);
546 IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
547 return IEMOP_RAISE_INVALID_OPCODE();
548}
549
550
551/** Invalid opcode where Intel requires a Mod R/M sequence. */
552FNIEMOP_DEF(iemOp_InvalidNeedRM)
553{
554 IEMOP_MNEMONIC(InvalidNeedRM, "InvalidNeedRM");
555 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
556 {
557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
558#ifndef TST_IEM_CHECK_MC
559 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
560 {
561 RTGCPTR GCPtrEff;
562 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
563 if (rcStrict != VINF_SUCCESS)
564 return rcStrict;
565 }
566#endif
567 IEMOP_HLP_DONE_DECODING();
568 }
569 return IEMOP_RAISE_INVALID_OPCODE();
570}
571
572
573/** Invalid opcode where Intel requires a Mod R/M sequence and an 8-bit
574 * immediate. */
575FNIEMOP_DEF(iemOp_InvalidNeedRMImm8)
576{
577 IEMOP_MNEMONIC(InvalidNeedRMImm8, "InvalidNeedRMImm8");
578 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
579 {
580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
581#ifndef TST_IEM_CHECK_MC
582 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
583 {
584 RTGCPTR GCPtrEff;
585 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
586 if (rcStrict != VINF_SUCCESS)
587 return rcStrict;
588 }
589#endif
590 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
591 IEMOP_HLP_DONE_DECODING();
592 }
593 return IEMOP_RAISE_INVALID_OPCODE();
594}
595
596
597/** Invalid opcode where Intel requires a 3rd escape byte and a Mod R/M
598 * sequence. */
599FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRM)
600{
601 IEMOP_MNEMONIC(InvalidNeed3ByteEscRM, "InvalidNeed3ByteEscRM");
602 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
603 {
604 uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
606#ifndef TST_IEM_CHECK_MC
607 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
608 {
609 RTGCPTR GCPtrEff;
610 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
611 if (rcStrict != VINF_SUCCESS)
612 return rcStrict;
613 }
614#endif
615 IEMOP_HLP_DONE_DECODING();
616 }
617 return IEMOP_RAISE_INVALID_OPCODE();
618}
619
620
621/** Invalid opcode where Intel requires a 3rd escape byte, a Mod R/M sequence,
622 * and an 8-bit immediate. */
623FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRMImm8)
624{
625 IEMOP_MNEMONIC(InvalidNeed3ByteEscRMImm8, "InvalidNeed3ByteEscRMImm8");
626 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
627 {
628 uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
630#ifndef TST_IEM_CHECK_MC
631 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
632 {
633 RTGCPTR GCPtrEff;
634 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 1, &GCPtrEff);
635 if (rcStrict != VINF_SUCCESS)
636 return rcStrict;
637 }
638#endif
639 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
640 IEMOP_HLP_DONE_DECODING();
641 }
642 return IEMOP_RAISE_INVALID_OPCODE();
643}
644
645
646
647/** @name ..... opcodes.
648 *
649 * @{
650 */
651
652/** @} */
653
654
655/** @name Two byte opcodes (first byte 0x0f).
656 *
657 * @{
658 */
659
660/** Opcode 0x0f 0x00 /0. */
661FNIEMOPRM_DEF(iemOp_Grp6_sldt)
662{
663 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
664 IEMOP_HLP_MIN_286();
665 IEMOP_HLP_NO_REAL_OR_V86_MODE();
666
667 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
668 {
669 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
670 switch (pVCpu->iem.s.enmEffOpSize)
671 {
672 case IEMMODE_16BIT:
673 IEM_MC_BEGIN(0, 1);
674 IEM_MC_LOCAL(uint16_t, u16Ldtr);
675 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
676 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
677 IEM_MC_ADVANCE_RIP();
678 IEM_MC_END();
679 break;
680
681 case IEMMODE_32BIT:
682 IEM_MC_BEGIN(0, 1);
683 IEM_MC_LOCAL(uint32_t, u32Ldtr);
684 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
685 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
686 IEM_MC_ADVANCE_RIP();
687 IEM_MC_END();
688 break;
689
690 case IEMMODE_64BIT:
691 IEM_MC_BEGIN(0, 1);
692 IEM_MC_LOCAL(uint64_t, u64Ldtr);
693 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
694 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
695 IEM_MC_ADVANCE_RIP();
696 IEM_MC_END();
697 break;
698
699 IEM_NOT_REACHED_DEFAULT_CASE_RET();
700 }
701 }
702 else
703 {
704 IEM_MC_BEGIN(0, 2);
705 IEM_MC_LOCAL(uint16_t, u16Ldtr);
706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
708 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
709 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
710 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
711 IEM_MC_ADVANCE_RIP();
712 IEM_MC_END();
713 }
714 return VINF_SUCCESS;
715}
716
717
718/** Opcode 0x0f 0x00 /1. */
719FNIEMOPRM_DEF(iemOp_Grp6_str)
720{
721 IEMOP_MNEMONIC(str, "str Rv/Mw");
722 IEMOP_HLP_MIN_286();
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724
725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
726 {
727 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
728 switch (pVCpu->iem.s.enmEffOpSize)
729 {
730 case IEMMODE_16BIT:
731 IEM_MC_BEGIN(0, 1);
732 IEM_MC_LOCAL(uint16_t, u16Tr);
733 IEM_MC_FETCH_TR_U16(u16Tr);
734 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
735 IEM_MC_ADVANCE_RIP();
736 IEM_MC_END();
737 break;
738
739 case IEMMODE_32BIT:
740 IEM_MC_BEGIN(0, 1);
741 IEM_MC_LOCAL(uint32_t, u32Tr);
742 IEM_MC_FETCH_TR_U32(u32Tr);
743 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
744 IEM_MC_ADVANCE_RIP();
745 IEM_MC_END();
746 break;
747
748 case IEMMODE_64BIT:
749 IEM_MC_BEGIN(0, 1);
750 IEM_MC_LOCAL(uint64_t, u64Tr);
751 IEM_MC_FETCH_TR_U64(u64Tr);
752 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
753 IEM_MC_ADVANCE_RIP();
754 IEM_MC_END();
755 break;
756
757 IEM_NOT_REACHED_DEFAULT_CASE_RET();
758 }
759 }
760 else
761 {
762 IEM_MC_BEGIN(0, 2);
763 IEM_MC_LOCAL(uint16_t, u16Tr);
764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
766 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
767 IEM_MC_FETCH_TR_U16(u16Tr);
768 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
769 IEM_MC_ADVANCE_RIP();
770 IEM_MC_END();
771 }
772 return VINF_SUCCESS;
773}
774
775
776/** Opcode 0x0f 0x00 /2. */
777FNIEMOPRM_DEF(iemOp_Grp6_lldt)
778{
779 IEMOP_MNEMONIC(lldt, "lldt Ew");
780 IEMOP_HLP_MIN_286();
781 IEMOP_HLP_NO_REAL_OR_V86_MODE();
782
783 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
784 {
785 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
786 IEM_MC_BEGIN(1, 0);
787 IEM_MC_ARG(uint16_t, u16Sel, 0);
788 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
789 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
790 IEM_MC_END();
791 }
792 else
793 {
794 IEM_MC_BEGIN(1, 1);
795 IEM_MC_ARG(uint16_t, u16Sel, 0);
796 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
798 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
799 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
800 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
801 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
802 IEM_MC_END();
803 }
804 return VINF_SUCCESS;
805}
806
807
808/** Opcode 0x0f 0x00 /3. */
809FNIEMOPRM_DEF(iemOp_Grp6_ltr)
810{
811 IEMOP_MNEMONIC(ltr, "ltr Ew");
812 IEMOP_HLP_MIN_286();
813 IEMOP_HLP_NO_REAL_OR_V86_MODE();
814
815 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
816 {
817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
818 IEM_MC_BEGIN(1, 0);
819 IEM_MC_ARG(uint16_t, u16Sel, 0);
820 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
821 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
822 IEM_MC_END();
823 }
824 else
825 {
826 IEM_MC_BEGIN(1, 1);
827 IEM_MC_ARG(uint16_t, u16Sel, 0);
828 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
831 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
832 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
833 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
834 IEM_MC_END();
835 }
836 return VINF_SUCCESS;
837}
838
839
840/** Common worker for group 6 /4 (verr) and /5 (verw). */
841FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
842{
843 IEMOP_HLP_MIN_286();
844 IEMOP_HLP_NO_REAL_OR_V86_MODE();
845
846 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
847 {
848 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
849 IEM_MC_BEGIN(2, 0);
850 IEM_MC_ARG(uint16_t, u16Sel, 0);
851 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
852 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
853 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
854 IEM_MC_END();
855 }
856 else
857 {
858 IEM_MC_BEGIN(2, 1);
859 IEM_MC_ARG(uint16_t, u16Sel, 0);
860 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
863 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
864 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
865 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
866 IEM_MC_END();
867 }
868 return VINF_SUCCESS;
869}
870
871
872/** Opcode 0x0f 0x00 /4. */
873FNIEMOPRM_DEF(iemOp_Grp6_verr)
874{
875 IEMOP_MNEMONIC(verr, "verr Ew");
876 IEMOP_HLP_MIN_286();
877 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
878}
879
880
881/** Opcode 0x0f 0x00 /5. */
882FNIEMOPRM_DEF(iemOp_Grp6_verw)
883{
884 IEMOP_MNEMONIC(verw, "verw Ew");
885 IEMOP_HLP_MIN_286();
886 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
887}
888
889
890/**
891 * Group 6 jump table.
892 */
893IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
894{
895 iemOp_Grp6_sldt,
896 iemOp_Grp6_str,
897 iemOp_Grp6_lldt,
898 iemOp_Grp6_ltr,
899 iemOp_Grp6_verr,
900 iemOp_Grp6_verw,
901 iemOp_InvalidWithRM,
902 iemOp_InvalidWithRM
903};
904
905/** Opcode 0x0f 0x00. */
906FNIEMOP_DEF(iemOp_Grp6)
907{
908 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
909 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
910}
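/*
 * Aside: group opcodes dispatch on the ModR/M reg field through an
 * 8-entry function pointer table, as above.  The bare shape of that
 * pattern (hypothetical names, not part of the build):
 */
#if 0 /* illustration only */
# include <stdint.h>

typedef int (*PFNGRPHANDLER)(uint8_t bRm);

static int DispatchGroup(PFNGRPHANDLER const apfn[8], uint8_t bRm)
{
    return apfn[(bRm >> 3) & 7](bRm); /* reg field, bits 5:3, selects /0../7 */
}
#endif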
911
912
913/** Opcode 0x0f 0x01 /0. */
914FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
915{
916 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
917 IEMOP_HLP_MIN_286();
918 IEMOP_HLP_64BIT_OP_SIZE();
919 IEM_MC_BEGIN(2, 1);
920 IEM_MC_ARG(uint8_t, iEffSeg, 0);
921 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
924 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
925 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
926 IEM_MC_END();
927 return VINF_SUCCESS;
928}
929
930
931/** Opcode 0x0f 0x01 /0. */
932FNIEMOP_DEF(iemOp_Grp7_vmcall)
933{
934 IEMOP_BITCH_ABOUT_STUB();
935 return IEMOP_RAISE_INVALID_OPCODE();
936}
937
938
939/** Opcode 0x0f 0x01 /0. */
940FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
941{
942 IEMOP_BITCH_ABOUT_STUB();
943 return IEMOP_RAISE_INVALID_OPCODE();
944}
945
946
947/** Opcode 0x0f 0x01 /0. */
948FNIEMOP_DEF(iemOp_Grp7_vmresume)
949{
950 IEMOP_BITCH_ABOUT_STUB();
951 return IEMOP_RAISE_INVALID_OPCODE();
952}
953
954
955/** Opcode 0x0f 0x01 /0. */
956FNIEMOP_DEF(iemOp_Grp7_vmxoff)
957{
958 IEMOP_BITCH_ABOUT_STUB();
959 return IEMOP_RAISE_INVALID_OPCODE();
960}
961
962
963/** Opcode 0x0f 0x01 /1. */
964FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
965{
966 IEMOP_MNEMONIC(sidt, "sidt Ms");
967 IEMOP_HLP_MIN_286();
968 IEMOP_HLP_64BIT_OP_SIZE();
969 IEM_MC_BEGIN(2, 1);
970 IEM_MC_ARG(uint8_t, iEffSeg, 0);
971 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
972 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
974 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
975 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
976 IEM_MC_END();
977 return VINF_SUCCESS;
978}
979
980
981/** Opcode 0x0f 0x01 /1. */
982FNIEMOP_DEF(iemOp_Grp7_monitor)
983{
984 IEMOP_MNEMONIC(monitor, "monitor");
985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
986 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
987}
988
989
990/** Opcode 0x0f 0x01 /1. */
991FNIEMOP_DEF(iemOp_Grp7_mwait)
992{
993 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
995 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
996}
997
998
999/** Opcode 0x0f 0x01 /2. */
1000FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
1001{
1002 IEMOP_MNEMONIC(lgdt, "lgdt");
1003 IEMOP_HLP_64BIT_OP_SIZE();
1004 IEM_MC_BEGIN(3, 1);
1005 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1006 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1007 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
1008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1010 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1011 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
1012 IEM_MC_END();
1013 return VINF_SUCCESS;
1014}
1015
1016
1017/** Opcode 0x0f 0x01 0xd0. */
1018FNIEMOP_DEF(iemOp_Grp7_xgetbv)
1019{
1020 IEMOP_MNEMONIC(xgetbv, "xgetbv");
1021 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
1022 {
1023 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1024 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
1025 }
1026 return IEMOP_RAISE_INVALID_OPCODE();
1027}
1028
1029
1030/** Opcode 0x0f 0x01 0xd1. */
1031FNIEMOP_DEF(iemOp_Grp7_xsetbv)
1032{
1033 IEMOP_MNEMONIC(xsetbv, "xsetbv");
1034 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
1035 {
1036 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1037 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
1038 }
1039 return IEMOP_RAISE_INVALID_OPCODE();
1040}
1041
1042
1043/** Opcode 0x0f 0x01 /3. */
1044FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
1045{
1046 IEMOP_MNEMONIC(lidt, "lidt");
1047 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
1048 ? IEMMODE_64BIT
1049 : pVCpu->iem.s.enmEffOpSize;
1050 IEM_MC_BEGIN(3, 1);
1051 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1052 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1053 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
1054 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1056 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1057 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
1058 IEM_MC_END();
1059 return VINF_SUCCESS;
1060}
1061
1062
1063/** Opcode 0x0f 0x01 0xd8. */
1064FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
1065
1066/** Opcode 0x0f 0x01 0xd9. */
1067FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
1068
1069/** Opcode 0x0f 0x01 0xda. */
1070FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
1071
1072/** Opcode 0x0f 0x01 0xdb. */
1073FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
1074
1075/** Opcode 0x0f 0x01 0xdc. */
1076FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
1077
1078/** Opcode 0x0f 0x01 0xdd. */
1079FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
1080
1081/** Opcode 0x0f 0x01 0xde. */
1082FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
1083
1084/** Opcode 0x0f 0x01 0xdf. */
1085FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
1086
1087/** Opcode 0x0f 0x01 /4. */
1088FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1089{
1090 IEMOP_MNEMONIC(smsw, "smsw");
1091 IEMOP_HLP_MIN_286();
1092 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1093 {
1094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1095 switch (pVCpu->iem.s.enmEffOpSize)
1096 {
1097 case IEMMODE_16BIT:
1098 IEM_MC_BEGIN(0, 1);
1099 IEM_MC_LOCAL(uint16_t, u16Tmp);
1100 IEM_MC_FETCH_CR0_U16(u16Tmp);
1101 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1102 { /* likely */ }
1103 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
1104 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1105 else
1106 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1107 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
1108 IEM_MC_ADVANCE_RIP();
1109 IEM_MC_END();
1110 return VINF_SUCCESS;
1111
1112 case IEMMODE_32BIT:
1113 IEM_MC_BEGIN(0, 1);
1114 IEM_MC_LOCAL(uint32_t, u32Tmp);
1115 IEM_MC_FETCH_CR0_U32(u32Tmp);
1116 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
1117 IEM_MC_ADVANCE_RIP();
1118 IEM_MC_END();
1119 return VINF_SUCCESS;
1120
1121 case IEMMODE_64BIT:
1122 IEM_MC_BEGIN(0, 1);
1123 IEM_MC_LOCAL(uint64_t, u64Tmp);
1124 IEM_MC_FETCH_CR0_U64(u64Tmp);
1125 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
1126 IEM_MC_ADVANCE_RIP();
1127 IEM_MC_END();
1128 return VINF_SUCCESS;
1129
1130 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1131 }
1132 }
1133 else
1134 {
1135 /* Ignore operand size here, memory refs are always 16-bit. */
1136 IEM_MC_BEGIN(0, 2);
1137 IEM_MC_LOCAL(uint16_t, u16Tmp);
1138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1139 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1141 IEM_MC_FETCH_CR0_U16(u16Tmp);
1142 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1143 { /* likely */ }
1144 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
1145 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1146 else
1147 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1148 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
1149 IEM_MC_ADVANCE_RIP();
1150 IEM_MC_END();
1151 return VINF_SUCCESS;
1152 }
1153}
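/*
 * Aside: the 0xffe0/0xfff0 ORs above model how the undefined high MSW
 * bits read back on older CPUs: a 286 defines only the low four bits, a
 * 386 additionally reports ET (bit 4), and later CPUs return the CR0 bits
 * unmodified.  A sketch of that fixup (hypothetical enum and helper):
 */
#if 0 /* illustration only */
# include <stdint.h>

typedef enum { kSmswCpu286, kSmswCpu386, kSmswCpuLater } SMSWCPU;

static uint16_t SmswFixupHighBits(uint16_t u16Cr0, SMSWCPU enmCpu)
{
    if (enmCpu == kSmswCpu286) return (uint16_t)(u16Cr0 | 0xfff0); /* bits 15:4 read as set */
    if (enmCpu == kSmswCpu386) return (uint16_t)(u16Cr0 | 0xffe0); /* bits 15:5 read as set */
    return u16Cr0;
}
#endif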
1154
1155
1156/** Opcode 0x0f 0x01 /6. */
1157FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1158{
1159 /* The operand size is effectively ignored, all is 16-bit and only the
1160 lower 4 bits (PE, MP, EM, TS) are used; PE cannot be cleared. */
1161 IEMOP_MNEMONIC(lmsw, "lmsw");
1162 IEMOP_HLP_MIN_286();
1163 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1164 {
1165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1166 IEM_MC_BEGIN(1, 0);
1167 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1168 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1169 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
1170 IEM_MC_END();
1171 }
1172 else
1173 {
1174 IEM_MC_BEGIN(1, 1);
1175 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1179 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1180 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
1181 IEM_MC_END();
1182 }
1183 return VINF_SUCCESS;
1184}
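/*
 * Aside: architecturally LMSW only updates CR0.PE/MP/EM/TS and cannot
 * clear PE once it is set; the real work happens in iemCImpl_lmsw.  A
 * hedged sketch of that update rule (hypothetical helper):
 */
#if 0 /* illustration only */
# include <stdint.h>

static uint32_t LmswApply(uint32_t uCr0, uint16_t u16NewMsw)
{
    uint32_t uNew = (uCr0 & ~UINT32_C(0xe)) | (u16NewMsw & 0xe); /* MP, EM, TS from source */
    uNew |= (uCr0 | u16NewMsw) & 1;                              /* PE: set-only           */
    return uNew;
}
#endif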
1185
1186
1187/** Opcode 0x0f 0x01 /7. */
1188FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1189{
1190 IEMOP_MNEMONIC(invlpg, "invlpg");
1191 IEMOP_HLP_MIN_486();
1192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1193 IEM_MC_BEGIN(1, 1);
1194 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1195 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1196 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
1197 IEM_MC_END();
1198 return VINF_SUCCESS;
1199}
1200
1201
1202/** Opcode 0x0f 0x01 /7. */
1203FNIEMOP_DEF(iemOp_Grp7_swapgs)
1204{
1205 IEMOP_MNEMONIC(swapgs, "swapgs");
1206 IEMOP_HLP_ONLY_64BIT();
1207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1208 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
1209}
1210
1211
1212/** Opcode 0x0f 0x01 /7. */
1213FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1214{
1215 NOREF(pVCpu);
1216 IEMOP_BITCH_ABOUT_STUB();
1217 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1218}
1219
1220
1221/** Opcode 0x0f 0x01. */
1222FNIEMOP_DEF(iemOp_Grp7)
1223{
1224 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1225 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1226 {
1227 case 0:
1228 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1229 return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
1230 switch (bRm & X86_MODRM_RM_MASK)
1231 {
1232 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1233 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1234 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1235 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1236 }
1237 return IEMOP_RAISE_INVALID_OPCODE();
1238
1239 case 1:
1240 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1241 return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
1242 switch (bRm & X86_MODRM_RM_MASK)
1243 {
1244 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1245 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1246 }
1247 return IEMOP_RAISE_INVALID_OPCODE();
1248
1249 case 2:
1250 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1251 return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
1252 switch (bRm & X86_MODRM_RM_MASK)
1253 {
1254 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1255 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1256 }
1257 return IEMOP_RAISE_INVALID_OPCODE();
1258
1259 case 3:
1260 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1261 return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
1262 switch (bRm & X86_MODRM_RM_MASK)
1263 {
1264 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1265 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1266 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1267 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1268 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1269 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1270 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1271 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1272 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1273 }
1274
1275 case 4:
1276 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1277
1278 case 5:
1279 return IEMOP_RAISE_INVALID_OPCODE();
1280
1281 case 6:
1282 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1283
1284 case 7:
1285 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1286 return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
1287 switch (bRm & X86_MODRM_RM_MASK)
1288 {
1289 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1290 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1291 }
1292 return IEMOP_RAISE_INVALID_OPCODE();
1293
1294 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1295 }
1296}
1297
1298/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
1299FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1300{
1301 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1302 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1303
1304 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1305 {
1306 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1307 switch (pVCpu->iem.s.enmEffOpSize)
1308 {
1309 case IEMMODE_16BIT:
1310 {
1311 IEM_MC_BEGIN(3, 0);
1312 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1313 IEM_MC_ARG(uint16_t, u16Sel, 1);
1314 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1315
1316 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1317 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1318 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1319
1320 IEM_MC_END();
1321 return VINF_SUCCESS;
1322 }
1323
1324 case IEMMODE_32BIT:
1325 case IEMMODE_64BIT:
1326 {
1327 IEM_MC_BEGIN(3, 0);
1328 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1329 IEM_MC_ARG(uint16_t, u16Sel, 1);
1330 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1331
1332 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1333 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1334 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1335
1336 IEM_MC_END();
1337 return VINF_SUCCESS;
1338 }
1339
1340 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1341 }
1342 }
1343 else
1344 {
1345 switch (pVCpu->iem.s.enmEffOpSize)
1346 {
1347 case IEMMODE_16BIT:
1348 {
1349 IEM_MC_BEGIN(3, 1);
1350 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1351 IEM_MC_ARG(uint16_t, u16Sel, 1);
1352 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1354
1355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1356 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1357
1358 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1359 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1360 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1361
1362 IEM_MC_END();
1363 return VINF_SUCCESS;
1364 }
1365
1366 case IEMMODE_32BIT:
1367 case IEMMODE_64BIT:
1368 {
1369 IEM_MC_BEGIN(3, 1);
1370 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1371 IEM_MC_ARG(uint16_t, u16Sel, 1);
1372 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1374
1375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1376 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1377/** @todo testcase: make sure it's a 16-bit read. */
1378
1379 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1380 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1381 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1382
1383 IEM_MC_END();
1384 return VINF_SUCCESS;
1385 }
1386
1387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1388 }
1389 }
1390}
1391
1392
1393
1394/** Opcode 0x0f 0x02. */
1395FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1396{
1397 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1398 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1399}
1400
1401
1402/** Opcode 0x0f 0x03. */
1403FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1404{
1405 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1406 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1407}
1408
1409
1410/** Opcode 0x0f 0x05. */
1411FNIEMOP_DEF(iemOp_syscall)
1412{
1413 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1415 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
1416}
1417
1418
1419/** Opcode 0x0f 0x06. */
1420FNIEMOP_DEF(iemOp_clts)
1421{
1422 IEMOP_MNEMONIC(clts, "clts");
1423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1424 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
1425}
1426
1427
1428/** Opcode 0x0f 0x07. */
1429FNIEMOP_DEF(iemOp_sysret)
1430{
1431 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1433 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
1434}
1435
1436
1437/** Opcode 0x0f 0x08. */
1438FNIEMOP_STUB(iemOp_invd);
1439// IEMOP_HLP_MIN_486();
1440
1441
1442/** Opcode 0x0f 0x09. */
1443FNIEMOP_DEF(iemOp_wbinvd)
1444{
1445 IEMOP_MNEMONIC(wbinvd, "wbinvd");
1446 IEMOP_HLP_MIN_486();
1447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1448 IEM_MC_BEGIN(0, 0);
1449 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
1450 IEM_MC_ADVANCE_RIP();
1451 IEM_MC_END();
1452 return VINF_SUCCESS; /* ignore for now */
1453}
1454
1455
1456/** Opcode 0x0f 0x0b. */
1457FNIEMOP_DEF(iemOp_ud2)
1458{
1459 IEMOP_MNEMONIC(ud2, "ud2");
1460 return IEMOP_RAISE_INVALID_OPCODE();
1461}
1462
1463/** Opcode 0x0f 0x0d. */
1464FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1465{
1466 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1467 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1468 {
1469 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1470 return IEMOP_RAISE_INVALID_OPCODE();
1471 }
1472
1473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1474 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1475 {
1476 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1477 return IEMOP_RAISE_INVALID_OPCODE();
1478 }
1479
1480 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1481 {
1482 case 2: /* Aliased to /0 for the time being. */
1483 case 4: /* Aliased to /0 for the time being. */
1484 case 5: /* Aliased to /0 for the time being. */
1485 case 6: /* Aliased to /0 for the time being. */
1486 case 7: /* Aliased to /0 for the time being. */
1487 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
1488 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
1489 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
1490 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1491 }
1492
1493 IEM_MC_BEGIN(0, 1);
1494 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1497 /* Currently a NOP. */
1498 NOREF(GCPtrEffSrc);
1499 IEM_MC_ADVANCE_RIP();
1500 IEM_MC_END();
1501 return VINF_SUCCESS;
1502}
1503
1504
1505/** Opcode 0x0f 0x0e. */
1506FNIEMOP_STUB(iemOp_femms);
1507
1508
1509/** Opcode 0x0f 0x0f 0x0c. */
1510FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);
1511
1512/** Opcode 0x0f 0x0f 0x0d. */
1513FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);
1514
1515/** Opcode 0x0f 0x0f 0x1c. */
1516FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);
1517
1518/** Opcode 0x0f 0x0f 0x1d. */
1519FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);
1520
1521/** Opcode 0x0f 0x0f 0x8a. */
1522FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);
1523
1524/** Opcode 0x0f 0x0f 0x8e. */
1525FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);
1526
1527/** Opcode 0x0f 0x0f 0x90. */
1528FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);
1529
1530/** Opcode 0x0f 0x0f 0x94. */
1531FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);
1532
1533/** Opcode 0x0f 0x0f 0x96. */
1534FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);
1535
1536/** Opcode 0x0f 0x0f 0x97. */
1537FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);
1538
1539/** Opcode 0x0f 0x0f 0x9a. */
1540FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);
1541
1542/** Opcode 0x0f 0x0f 0x9e. */
1543FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);
1544
1545/** Opcode 0x0f 0x0f 0xa0. */
1546FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);
1547
1548/** Opcode 0x0f 0x0f 0xa4. */
1549FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);
1550
1551/** Opcode 0x0f 0x0f 0xa6. */
1552FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);
1553
1554/** Opcode 0x0f 0x0f 0xa7. */
1555FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);
1556
1557/** Opcode 0x0f 0x0f 0xaa. */
1558FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);
1559
1560/** Opcode 0x0f 0x0f 0xae. */
1561FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);
1562
1563/** Opcode 0x0f 0x0f 0xb0. */
1564FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1565
1566/** Opcode 0x0f 0x0f 0xb4. */
1567FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);
1568
1569/** Opcode 0x0f 0x0f 0xb6. */
1570FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1571
1572/** Opcode 0x0f 0x0f 0xb7. */
1573FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);
1574
1575/** Opcode 0x0f 0x0f 0xbb. */
1576FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);
1577
1578/** Opcode 0x0f 0x0f 0xbf. */
1579FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1580
1581
1582/** Opcode 0x0f 0x0f. */
1583FNIEMOP_DEF(iemOp_3Dnow)
1584{
1585 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
1586 {
1587 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
1588 return IEMOP_RAISE_INVALID_OPCODE();
1589 }
1590
1591 /* This is pretty sparse, use switch instead of table. */
1592 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1593 switch (b)
1594 {
1595 case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
1596 case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
1597 case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
1598 case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
1599 case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
1600 case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
1601 case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
1602 case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
1603 case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
1604 case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
1605 case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
1606 case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
1607 case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
1608 case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
1609 case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
1610 case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
1611 case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
1612 case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
1613 case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1614 case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
1615 case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1616 case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
1617 case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
1618 case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
1619 default:
1620 return IEMOP_RAISE_INVALID_OPCODE();
1621 }
1622}
1623
1624
1625/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
1626FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
1627/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
1628FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
1629/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
1630FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
1631/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
1632FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
1633
1634
1635/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
1636FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
1637{
1638 IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
1639 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1640 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1641 {
1642 /*
1643 * Register, register.
1644 */
1645 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1646 IEM_MC_BEGIN(0, 0);
1647 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1648 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1649 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1650 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1651 IEM_MC_ADVANCE_RIP();
1652 IEM_MC_END();
1653 }
1654 else
1655 {
1656 /*
1657 * Memory, register.
1658 */
1659 IEM_MC_BEGIN(0, 2);
1660 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1661 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1662
1663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1664 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1665 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1666 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1667
1668 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1669 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1670
1671 IEM_MC_ADVANCE_RIP();
1672 IEM_MC_END();
1673 }
1674 return VINF_SUCCESS;
1675}
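/*
 * Aside: the memory form above is an unaligned 128-bit store; unlike
 * MOVAPS, MOVUPS raises no #GP on a misaligned address.  The plain C
 * equivalent is a 16-byte copy with no alignment assumption:
 */
#if 0 /* illustration only */
# include <string.h>
# include <stdint.h>

static void StoreU128Unaligned(void *pvDst, const uint8_t abSrc[16])
{
    memcpy(pvDst, abSrc, 16); /* byte-wise copy is legal at any alignment */
}
#endif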
1676
1677
1678/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
1679FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);
1680
1681/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
1682FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
1683
1684/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
1685FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
1686{
1687 IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
1688 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1689 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1690 {
1691 /*
1692 * Register, register.
1693 */
1694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1695 IEM_MC_BEGIN(0, 1);
1696 IEM_MC_LOCAL(uint64_t, uSrc);
1697
1698 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1699 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1700 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1701 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1702
1703 IEM_MC_ADVANCE_RIP();
1704 IEM_MC_END();
1705 }
1706 else
1707 {
1708 /*
1709 * Memory, register.
1710 */
1711 IEM_MC_BEGIN(0, 2);
1712 IEM_MC_LOCAL(uint64_t, uSrc);
1713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1714
1715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1717 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1718 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1719
1720 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1721 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1722
1723 IEM_MC_ADVANCE_RIP();
1724 IEM_MC_END();
1725 }
1726 return VINF_SUCCESS;
1727}
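/*
 * Aside: the register form above replaces only the destination's low
 * quadword and leaves bits 127:64 untouched, which is why a U64 fetch and
 * store is used instead of a full XMM copy.  Sketch (hypothetical type):
 */
#if 0 /* illustration only */
# include <stdint.h>

typedef struct { uint64_t au64[2]; } XMMSKETCH;

static void MovsdRegReg(XMMSKETCH *pDst, const XMMSKETCH *pSrc)
{
    pDst->au64[0] = pSrc->au64[0]; /* low qword copied, high qword preserved */
}
#endif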
1728
1729
1730/** Opcode 0x0f 0x12. */
1731FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT
1732
1733/** Opcode 0x66 0x0f 0x12. */
1734FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT
1735
1736/** Opcode 0xf3 0x0f 0x12. */
1737FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT
1738
1739/** Opcode 0xf2 0x0f 0x12. */
1740FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT
1741
1742/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
1743FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
1744
1745/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
1746FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1747{
1748 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
1749 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1750 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1751 {
1752#if 0
1753 /*
1754 * Register, register.
1755 */
1756 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1757 IEM_MC_BEGIN(0, 1);
1758 IEM_MC_LOCAL(uint64_t, uSrc);
1759 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1760 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1761 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1762 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1763 IEM_MC_ADVANCE_RIP();
1764 IEM_MC_END();
1765#else
1766 return IEMOP_RAISE_INVALID_OPCODE();
1767#endif
1768 }
1769 else
1770 {
1771 /*
1772 * Memory, register.
1773 */
1774 IEM_MC_BEGIN(0, 2);
1775 IEM_MC_LOCAL(uint64_t, uSrc);
1776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1777
1778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1779 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1780 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1781 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1782
1783 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1784 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1785
1786 IEM_MC_ADVANCE_RIP();
1787 IEM_MC_END();
1788 }
1789 return VINF_SUCCESS;
1790}
1791
1792/* Opcode 0xf3 0x0f 0x13 - invalid */
1793/* Opcode 0xf2 0x0f 0x13 - invalid */
1794
1795/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
1796FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1797/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
1798FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1799/* Opcode 0xf3 0x0f 0x14 - invalid */
1800/* Opcode 0xf2 0x0f 0x14 - invalid */
1801/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
1802FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1803/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
1804FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1805/* Opcode 0xf3 0x0f 0x15 - invalid */
1806/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq / vmovlhps Vdq, Hq, Uq */
1808FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
1809/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
1810FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
1811/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
1812FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
1813/* Opcode 0xf2 0x0f 0x16 - invalid */
1814/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
1815FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
1816/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
1817FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
1818/* Opcode 0xf3 0x0f 0x17 - invalid */
1819/* Opcode 0xf2 0x0f 0x17 - invalid */
1820
1821
1822/** Opcode 0x0f 0x18. */
1823FNIEMOP_DEF(iemOp_prefetch_Grp16)
1824{
1825 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1826 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1827 {
1828 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1829 {
1830 case 4: /* Aliased to /0 for the time being according to AMD. */
1831 case 5: /* Aliased to /0 for the time being according to AMD. */
1832 case 6: /* Aliased to /0 for the time being according to AMD. */
1833 case 7: /* Aliased to /0 for the time being according to AMD. */
1834 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1835 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1836 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1837 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1838 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1839 }
1840
1841 IEM_MC_BEGIN(0, 1);
1842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1845 /* Currently a NOP. */
1846 NOREF(GCPtrEffSrc);
1847 IEM_MC_ADVANCE_RIP();
1848 IEM_MC_END();
1849 return VINF_SUCCESS;
1850 }
1851
1852 return IEMOP_RAISE_INVALID_OPCODE();
1853}
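
/* Example (illustrative): 0f 18 0e decodes in 32-bit code as
   'prefetcht0 [esi]' (mod=0, reg=1 selects T0, rm=6 selects ESI); since the
   hint is not modelled, the worker above just calculates the effective
   address and advances RIP, while a register operand (mod=3) raises #UD. */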
1854
1855
1856/** Opcode 0x0f 0x19..0x1f. */
1857FNIEMOP_DEF(iemOp_nop_Ev)
1858{
1859 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1860 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1861 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1862 {
1863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1864 IEM_MC_BEGIN(0, 0);
1865 IEM_MC_ADVANCE_RIP();
1866 IEM_MC_END();
1867 }
1868 else
1869 {
1870 IEM_MC_BEGIN(0, 1);
1871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1874 /* Currently a NOP. */
1875 NOREF(GCPtrEffSrc);
1876 IEM_MC_ADVANCE_RIP();
1877 IEM_MC_END();
1878 }
1879 return VINF_SUCCESS;
1880}
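
/* Example (illustrative): 0f 1f 44 00 00 is the commonly recommended 5-byte
   canonical NOP ('nop dword [eax+eax*1+0]'); the effective address above is
   calculated purely for the decoding side effects and then dropped. */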
1881
1882
1883/** Opcode 0x0f 0x20. */
1884FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1885{
    /* mod is ignored, as are operand size overrides. */
1887 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1888 IEMOP_HLP_MIN_386();
1889 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1890 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1891 else
1892 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1893
1894 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1895 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1896 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1897 {
1898 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1899 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1900 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1901 iCrReg |= 8;
1902 }
1903 switch (iCrReg)
1904 {
1905 case 0: case 2: case 3: case 4: case 8:
1906 break;
1907 default:
1908 return IEMOP_RAISE_INVALID_OPCODE();
1909 }
1910 IEMOP_HLP_DONE_DECODING();
1911
1912 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1913}
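
/* Example (illustrative, AMD specific): f0 0f 20 c0 uses the LOCK prefix as
   the alternative CR8 encoding, i.e. 'mov eax,cr8', on CPUs reporting the
   move-CR8-in-32-bit-mode feature checked above; on other CPUs the same
   bytes raise #UD before any #GP checks. */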
1914
1915
1916/** Opcode 0x0f 0x21. */
1917FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1918{
1919 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1920 IEMOP_HLP_MIN_386();
1921 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1923 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1924 return IEMOP_RAISE_INVALID_OPCODE();
1925 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1926 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1927 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1928}
1929
1930
1931/** Opcode 0x0f 0x22. */
1932FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1933{
    /* mod is ignored, as are operand size overrides. */
1935 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1936 IEMOP_HLP_MIN_386();
1937 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1938 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1939 else
1940 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1941
1942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1943 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1944 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1945 {
1946 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1947 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1948 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1949 iCrReg |= 8;
1950 }
1951 switch (iCrReg)
1952 {
1953 case 0: case 2: case 3: case 4: case 8:
1954 break;
1955 default:
1956 return IEMOP_RAISE_INVALID_OPCODE();
1957 }
1958 IEMOP_HLP_DONE_DECODING();
1959
1960 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1961}
1962
1963
1964/** Opcode 0x0f 0x23. */
1965FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1966{
1967 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1968 IEMOP_HLP_MIN_386();
1969 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1971 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1972 return IEMOP_RAISE_INVALID_OPCODE();
1973 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1974 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1975 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1976}
1977
1978
1979/** Opcode 0x0f 0x24. */
1980FNIEMOP_DEF(iemOp_mov_Rd_Td)
1981{
1982 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1983 /** @todo works on 386 and 486. */
1984 /* The RM byte is not considered, see testcase. */
1985 return IEMOP_RAISE_INVALID_OPCODE();
1986}
1987
1988
1989/** Opcode 0x0f 0x26. */
1990FNIEMOP_DEF(iemOp_mov_Td_Rd)
1991{
1992 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1993 /** @todo works on 386 and 486. */
1994 /* The RM byte is not considered, see testcase. */
1995 return IEMOP_RAISE_INVALID_OPCODE();
1996}
1997
1998
1999/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
2000FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
2001{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps Vps,Wps");
2003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2004 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2005 {
2006 /*
2007 * Register, register.
2008 */
2009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2010 IEM_MC_BEGIN(0, 0);
2011 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2012 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2013 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2014 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2015 IEM_MC_ADVANCE_RIP();
2016 IEM_MC_END();
2017 }
2018 else
2019 {
2020 /*
2021 * Register, memory.
2022 */
2023 IEM_MC_BEGIN(0, 2);
2024 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2026
2027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2029 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2030 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2031
2032 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2033 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2034
2035 IEM_MC_ADVANCE_RIP();
2036 IEM_MC_END();
2037 }
2038 return VINF_SUCCESS;
2039}
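
/* Example (illustrative): 0f 28 04 24 decodes in 32-bit code as
   'movaps xmm0,[esp]'; the _ALIGN_SSE fetch above implements the movaps
   requirement that the 128-bit memory operand be 16-byte aligned, raising
   #GP(0) otherwise. */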
2040
2041/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
2042FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
2043{
    IEMOP_MNEMONIC(movapd_r_mr, "movapd Vpd,Wpd");
2045 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2046 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2047 {
2048 /*
2049 * Register, register.
2050 */
2051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2052 IEM_MC_BEGIN(0, 0);
2053 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2054 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2055 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2056 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2057 IEM_MC_ADVANCE_RIP();
2058 IEM_MC_END();
2059 }
2060 else
2061 {
2062 /*
2063 * Register, memory.
2064 */
2065 IEM_MC_BEGIN(0, 2);
2066 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2068
2069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2071 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2072 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2073
2074 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2075 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2076
2077 IEM_MC_ADVANCE_RIP();
2078 IEM_MC_END();
2079 }
2080 return VINF_SUCCESS;
2081}
2082
2083/* Opcode 0xf3 0x0f 0x28 - invalid */
2084/* Opcode 0xf2 0x0f 0x28 - invalid */
2085
2086/** Opcode 0x0f 0x29. */
2087FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
2088{
2089 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2090 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
2091 else
2092 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
2093 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2094 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2095 {
2096 /*
2097 * Register, register.
2098 */
2099 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
2100 IEM_MC_BEGIN(0, 0);
2101 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2102 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2103 else
2104 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2105 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2106 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2107 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2108 IEM_MC_ADVANCE_RIP();
2109 IEM_MC_END();
2110 }
2111 else
2112 {
2113 /*
2114 * Memory, register.
2115 */
2116 IEM_MC_BEGIN(0, 2);
2117 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2119
2120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2121 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2122 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2123 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2124 else
2125 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2126 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2127
2128 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2129 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2130
2131 IEM_MC_ADVANCE_RIP();
2132 IEM_MC_END();
2133 }
2134 return VINF_SUCCESS;
2135}
2136
2137
2138/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2139FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2140/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2141FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2142/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
2143FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
2144/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2145FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
2146
2147
2148/** Opcode 0x0f 0x2b. */
2149FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
2150{
2151 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2152 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
2153 else
        IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
2155 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2156 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2157 {
2158 /*
2159 * memory, register.
2160 */
2161 IEM_MC_BEGIN(0, 2);
2162 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2164
2165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2166 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2167 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2168 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2169 else
2170 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2172
2173 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2174 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2175
2176 IEM_MC_ADVANCE_RIP();
2177 IEM_MC_END();
2178 }
2179 /* The register, register encoding is invalid. */
2180 else
2181 return IEMOP_RAISE_INVALID_OPCODE();
2182 return VINF_SUCCESS;
2183}
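
/* Example (illustrative): 0f 2b 01 decodes as 'movntps [ecx],xmm0'. The
   non-temporal hint is not modelled, so the store behaves like an ordinary
   aligned 128-bit store here, and the register form (e.g. 0f 2b c1)
   correctly raises #UD as coded above. */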
2184
2185
2186/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2187FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2188/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2189FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2190/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
2191FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2192/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
2193FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2194
2195/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2196FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2197/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2198FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2199/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
2200FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2201/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
2202FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2203
2204/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
2205FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
2206/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
2207FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
2208/* Opcode 0xf3 0x0f 0x2e - invalid */
2209/* Opcode 0xf2 0x0f 0x2e - invalid */
2210
2211/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
2212FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
2213/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
2214FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
2215/* Opcode 0xf3 0x0f 0x2f - invalid */
2216/* Opcode 0xf2 0x0f 0x2f - invalid */
2217
2218/** Opcode 0x0f 0x30. */
2219FNIEMOP_DEF(iemOp_wrmsr)
2220{
2221 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2223 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2224}
2225
2226
2227/** Opcode 0x0f 0x31. */
2228FNIEMOP_DEF(iemOp_rdtsc)
2229{
2230 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2232 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2233}
2234
2235
/** Opcode 0x0f 0x32. */
2237FNIEMOP_DEF(iemOp_rdmsr)
2238{
2239 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2241 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2242}
2243
2244
/** Opcode 0x0f 0x33. */
2246FNIEMOP_STUB(iemOp_rdpmc);
2247/** Opcode 0x0f 0x34. */
2248FNIEMOP_STUB(iemOp_sysenter);
2249/** Opcode 0x0f 0x35. */
2250FNIEMOP_STUB(iemOp_sysexit);
2251/** Opcode 0x0f 0x37. */
2252FNIEMOP_STUB(iemOp_getsec);
2253/** Opcode 0x0f 0x38. */
2254FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2255/** Opcode 0x0f 0x3a. */
2256FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2257
2258
2259/**
2260 * Implements a conditional move.
2261 *
2262 * Wish there was an obvious way to do this where we could share and reduce
2263 * code bloat.
2264 *
2265 * @param a_Cnd The conditional "microcode" operation.
2266 */
2267#define CMOV_X(a_Cnd) \
2268 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2269 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2270 { \
2271 switch (pVCpu->iem.s.enmEffOpSize) \
2272 { \
2273 case IEMMODE_16BIT: \
2274 IEM_MC_BEGIN(0, 1); \
2275 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2276 a_Cnd { \
2277 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2278 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2279 } IEM_MC_ENDIF(); \
2280 IEM_MC_ADVANCE_RIP(); \
2281 IEM_MC_END(); \
2282 return VINF_SUCCESS; \
2283 \
2284 case IEMMODE_32BIT: \
2285 IEM_MC_BEGIN(0, 1); \
2286 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2287 a_Cnd { \
2288 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2289 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2290 } IEM_MC_ELSE() { \
2291 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2292 } IEM_MC_ENDIF(); \
2293 IEM_MC_ADVANCE_RIP(); \
2294 IEM_MC_END(); \
2295 return VINF_SUCCESS; \
2296 \
2297 case IEMMODE_64BIT: \
2298 IEM_MC_BEGIN(0, 1); \
2299 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2300 a_Cnd { \
2301 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2302 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2303 } IEM_MC_ENDIF(); \
2304 IEM_MC_ADVANCE_RIP(); \
2305 IEM_MC_END(); \
2306 return VINF_SUCCESS; \
2307 \
2308 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2309 } \
2310 } \
2311 else \
2312 { \
2313 switch (pVCpu->iem.s.enmEffOpSize) \
2314 { \
2315 case IEMMODE_16BIT: \
2316 IEM_MC_BEGIN(0, 2); \
2317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2318 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2320 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2321 a_Cnd { \
2322 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2323 } IEM_MC_ENDIF(); \
2324 IEM_MC_ADVANCE_RIP(); \
2325 IEM_MC_END(); \
2326 return VINF_SUCCESS; \
2327 \
2328 case IEMMODE_32BIT: \
2329 IEM_MC_BEGIN(0, 2); \
2330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2331 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2333 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2334 a_Cnd { \
2335 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2336 } IEM_MC_ELSE() { \
2337 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2338 } IEM_MC_ENDIF(); \
2339 IEM_MC_ADVANCE_RIP(); \
2340 IEM_MC_END(); \
2341 return VINF_SUCCESS; \
2342 \
2343 case IEMMODE_64BIT: \
2344 IEM_MC_BEGIN(0, 2); \
2345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2346 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2348 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2349 a_Cnd { \
2350 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2351 } IEM_MC_ENDIF(); \
2352 IEM_MC_ADVANCE_RIP(); \
2353 IEM_MC_END(); \
2354 return VINF_SUCCESS; \
2355 \
2356 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2357 } \
2358 } do {} while (0)
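
/* Usage note (illustrative): the decoders below instantiate CMOV_X with the
   matching EFLAGS test, e.g. CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)) for
   cmove. Two architectural subtleties the macro preserves: a memory source
   is always fetched (and may fault) even when the condition is false, and a
   32-bit destination is zero-extended to 64 bits in long mode even when no
   move takes place. */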
2359
2360
2361
2362/** Opcode 0x0f 0x40. */
2363FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2364{
2365 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2366 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2367}
2368
2369
2370/** Opcode 0x0f 0x41. */
2371FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2372{
2373 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2374 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2375}
2376
2377
2378/** Opcode 0x0f 0x42. */
2379FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2380{
2381 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2382 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2383}
2384
2385
2386/** Opcode 0x0f 0x43. */
2387FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2388{
2389 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2390 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2391}
2392
2393
2394/** Opcode 0x0f 0x44. */
2395FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2396{
2397 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2398 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2399}
2400
2401
2402/** Opcode 0x0f 0x45. */
2403FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2404{
2405 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2406 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2407}
2408
2409
2410/** Opcode 0x0f 0x46. */
2411FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2412{
2413 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2414 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2415}
2416
2417
2418/** Opcode 0x0f 0x47. */
2419FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2420{
2421 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2422 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2423}
2424
2425
2426/** Opcode 0x0f 0x48. */
2427FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2428{
2429 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2430 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2431}
2432
2433
2434/** Opcode 0x0f 0x49. */
2435FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2436{
2437 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2438 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2439}
2440
2441
2442/** Opcode 0x0f 0x4a. */
2443FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2444{
2445 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2446 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2447}
2448
2449
2450/** Opcode 0x0f 0x4b. */
2451FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2452{
2453 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2454 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2455}
2456
2457
2458/** Opcode 0x0f 0x4c. */
2459FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2460{
2461 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2462 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2463}
2464
2465
2466/** Opcode 0x0f 0x4d. */
2467FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2468{
2469 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2470 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2471}
2472
2473
2474/** Opcode 0x0f 0x4e. */
2475FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2476{
2477 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2478 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2479}
2480
2481
2482/** Opcode 0x0f 0x4f. */
2483FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2484{
2485 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2486 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2487}
2488
2489#undef CMOV_X
2490
2491/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
2492FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
2493/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
2494FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
2495/* Opcode 0xf3 0x0f 0x50 - invalid */
2496/* Opcode 0xf2 0x0f 0x50 - invalid */
2497
2498/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
2499FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2500/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
2501FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2502/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
2503FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2504/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2505FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2506
2507/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
2508FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2509/* Opcode 0x66 0x0f 0x52 - invalid */
2510/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
2511FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2512/* Opcode 0xf2 0x0f 0x52 - invalid */
2513
2514/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
2515FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2516/* Opcode 0x66 0x0f 0x53 - invalid */
2517/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
2518FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2519/* Opcode 0xf2 0x0f 0x53 - invalid */
2520
2521/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
2522FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
2523/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
2524FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
2525/* Opcode 0xf3 0x0f 0x54 - invalid */
2526/* Opcode 0xf2 0x0f 0x54 - invalid */
2527
2528/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
2529FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
2530/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
2531FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
2532/* Opcode 0xf3 0x0f 0x55 - invalid */
2533/* Opcode 0xf2 0x0f 0x55 - invalid */
2534
2535/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
2536FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
2537/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
2538FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
2539/* Opcode 0xf3 0x0f 0x56 - invalid */
2540/* Opcode 0xf2 0x0f 0x56 - invalid */
2541
2542/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
2543FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
2544/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
2545FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
2546/* Opcode 0xf3 0x0f 0x57 - invalid */
2547/* Opcode 0xf2 0x0f 0x57 - invalid */
2548
2549/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2550FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2551/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2552FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2553/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2554FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2555/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2556FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2557
2558/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2559FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2560/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2561FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2562/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2563FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2564/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2565FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2566
2567/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2568FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2569/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2570FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2571/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2572FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2573/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2574FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2575
2576/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2577FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2578/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2579FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2580/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2581FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2582/* Opcode 0xf2 0x0f 0x5b - invalid */
2583
2584/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2585FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2586/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2587FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2588/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2589FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2590/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2591FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2592
2593/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2594FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2595/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2596FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2597/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2598FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2599/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2600FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2601
2602/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2603FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2604/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2605FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2606/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2607FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2608/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2609FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2610
2611/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2612FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2613/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2614FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2615/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2616FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2617/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2618FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2619
2620/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx    xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned 64-bit memory access.
2627 *
2628 * Exceptions type 4.
2629 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2631{
2632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2633 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2634 {
2635 /*
2636 * Register, register.
2637 */
2638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2639 IEM_MC_BEGIN(2, 0);
2640 IEM_MC_ARG(uint128_t *, pDst, 0);
2641 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2642 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2643 IEM_MC_PREPARE_SSE_USAGE();
2644 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2645 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2646 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2647 IEM_MC_ADVANCE_RIP();
2648 IEM_MC_END();
2649 }
2650 else
2651 {
2652 /*
2653 * Register, memory.
2654 */
2655 IEM_MC_BEGIN(2, 2);
2656 IEM_MC_ARG(uint128_t *, pDst, 0);
2657 IEM_MC_LOCAL(uint64_t, uSrc);
2658 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2660
2661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2663 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2664 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2665
2666 IEM_MC_PREPARE_SSE_USAGE();
2667 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2668 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2669
2670 IEM_MC_ADVANCE_RIP();
2671 IEM_MC_END();
2672 }
2673 return VINF_SUCCESS;
2674}
2675
2676
2677/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx    mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
2684 *
2685 * Exceptions type 4.
2686 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2688{
2689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2690 if (!pImpl->pfnU64)
2691 return IEMOP_RAISE_INVALID_OPCODE();
2692 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2693 {
2694 /*
2695 * Register, register.
2696 */
2697 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2698 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2700 IEM_MC_BEGIN(2, 0);
2701 IEM_MC_ARG(uint64_t *, pDst, 0);
2702 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2703 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2704 IEM_MC_PREPARE_FPU_USAGE();
2705 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2706 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2707 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2708 IEM_MC_ADVANCE_RIP();
2709 IEM_MC_END();
2710 }
2711 else
2712 {
2713 /*
2714 * Register, memory.
2715 */
2716 IEM_MC_BEGIN(2, 2);
2717 IEM_MC_ARG(uint64_t *, pDst, 0);
2718 IEM_MC_LOCAL(uint32_t, uSrc);
2719 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2720 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2721
2722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2724 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2725 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2726
2727 IEM_MC_PREPARE_FPU_USAGE();
2728 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2729 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2730
2731 IEM_MC_ADVANCE_RIP();
2732 IEM_MC_END();
2733 }
2734 return VINF_SUCCESS;
2735}
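
#if 0 /* Illustrative sketch, not built: reference semantics of the MMX
         low-interleave done by the pfnU64 worker, using punpcklbw as the
         example; the function and parameter names are invented. */
static void punpcklbwRef(uint8_t abDst[8], uint8_t const abSrc[8])
{
    uint8_t abRes[8];
    for (unsigned i = 0; i < 4; i++)
    {
        abRes[i * 2]     = abDst[i]; /* even result bytes: low dst bytes */
        abRes[i * 2 + 1] = abSrc[i]; /* odd result bytes:  low src bytes */
    }
    for (unsigned i = 0; i < 8; i++)
        abDst[i] = abRes[i];
}
#endif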
2736
2737
2738/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2739FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2740{
2741 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2742 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2743}
2744
/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
2746FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2747{
2748 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2750}
2751
2752/* Opcode 0xf3 0x0f 0x60 - invalid */
2753
2754
2755/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2756FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2757{
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
2759 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2760}
2761
2762/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2763FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2764{
2765 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2766 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2767}
2768
2769/* Opcode 0xf3 0x0f 0x61 - invalid */
2770
2771
2772/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2773FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2774{
2775 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2776 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2777}
2778
2779/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2780FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2781{
2782 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2783 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2784}
2785
2786/* Opcode 0xf3 0x0f 0x62 - invalid */
2787
2788
2789
2790/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2791FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2792/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2793FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2794/* Opcode 0xf3 0x0f 0x63 - invalid */
2795
2796/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2797FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2798/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2799FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2800/* Opcode 0xf3 0x0f 0x64 - invalid */
2801
2802/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2803FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2804/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2805FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2806/* Opcode 0xf3 0x0f 0x65 - invalid */
2807
2808/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2809FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2810/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2811FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2812/* Opcode 0xf3 0x0f 0x66 - invalid */
2813
2814/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2815FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2816/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
2817FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2818/* Opcode 0xf3 0x0f 0x67 - invalid */
2819
2820
2821/**
2822 * Common worker for MMX instructions on the form:
2823 * pxxxx mm1, mm2/mem64
2824 *
2825 * The 2nd operand is the second half of a register, which in the memory case
2826 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2827 * where it may read the full 128 bits or only the upper 64 bits.
2828 *
2829 * Exceptions type 4.
2830 */
2831FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2832{
2833 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2834 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2835 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2836 {
2837 /*
2838 * Register, register.
2839 */
2840 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2841 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2843 IEM_MC_BEGIN(2, 0);
2844 IEM_MC_ARG(uint64_t *, pDst, 0);
2845 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2846 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2847 IEM_MC_PREPARE_FPU_USAGE();
2848 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2849 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2850 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2851 IEM_MC_ADVANCE_RIP();
2852 IEM_MC_END();
2853 }
2854 else
2855 {
2856 /*
2857 * Register, memory.
2858 */
2859 IEM_MC_BEGIN(2, 2);
2860 IEM_MC_ARG(uint64_t *, pDst, 0);
2861 IEM_MC_LOCAL(uint64_t, uSrc);
2862 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2864
2865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2867 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2868 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2869
2870 IEM_MC_PREPARE_FPU_USAGE();
2871 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2872 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2873
2874 IEM_MC_ADVANCE_RIP();
2875 IEM_MC_END();
2876 }
2877 return VINF_SUCCESS;
2878}
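
/* Example (illustrative): the high-half variants combine the upper halves,
   so punpckhdq mm0,mm1 with mm0=0x1122334455667788 and
   mm1=0x99aabbccddeeff00 yields mm0=0x99aabbcc11223344: the destination's
   old high dword moves down and the source's high dword lands on top. */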
2879
2880
2881/**
2882 * Common worker for SSE2 instructions on the form:
2883 * pxxxx xmm1, xmm2/mem128
2884 *
2885 * The 2nd operand is the second half of a register, which in the memory case
2886 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2887 * where it may read the full 128 bits or only the upper 64 bits.
2888 *
2889 * Exceptions type 4.
2890 */
2891FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2892{
2893 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2894 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2895 {
2896 /*
2897 * Register, register.
2898 */
2899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2900 IEM_MC_BEGIN(2, 0);
2901 IEM_MC_ARG(uint128_t *, pDst, 0);
2902 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2903 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2904 IEM_MC_PREPARE_SSE_USAGE();
2905 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2906 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2907 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2908 IEM_MC_ADVANCE_RIP();
2909 IEM_MC_END();
2910 }
2911 else
2912 {
2913 /*
2914 * Register, memory.
2915 */
2916 IEM_MC_BEGIN(2, 2);
2917 IEM_MC_ARG(uint128_t *, pDst, 0);
2918 IEM_MC_LOCAL(uint128_t, uSrc);
2919 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2920 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2921
2922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2924 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2926
2927 IEM_MC_PREPARE_SSE_USAGE();
2928 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2929 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2930
2931 IEM_MC_ADVANCE_RIP();
2932 IEM_MC_END();
2933 }
2934 return VINF_SUCCESS;
2935}
2936
2937
2938/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2939FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2940{
2941 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2942 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2943}
2944
2945/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2946FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2947{
2948 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2949 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2950}
2951/* Opcode 0xf3 0x0f 0x68 - invalid */
2952
2953
2954/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2955FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2956{
2957 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2958 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2959}
2960
2961/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2962FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2963{
2964 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2965 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2967}
2968/* Opcode 0xf3 0x0f 0x69 - invalid */
2969
2970
2971/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2972FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2973{
2974 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2975 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2976}
2977
2978/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
2979FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2980{
2981 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
2982 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2983}
2984/* Opcode 0xf3 0x0f 0x6a - invalid */
2985
2986
2987/** Opcode 0x0f 0x6b. */
2988FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2989
2990
2991/* Opcode 0x0f 0x6c - invalid */
2992
2993/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2994FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2995{
2996 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
2997 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2998}
2999
3000/* Opcode 0xf3 0x0f 0x6c - invalid */
3001/* Opcode 0xf2 0x0f 0x6c - invalid */
3002
3003
3004/* Opcode 0x0f 0x6d - invalid */
3005
3006/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
3007FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3008{
    IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, W");
3010 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3011}
3012
3013/* Opcode 0xf3 0x0f 0x6d - invalid */
3014
3015
3016
3017/** Opcode 0x0f 0x6e. */
3018FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
3019{
3020 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3021 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3022 {
3023 case IEM_OP_PRF_SIZE_OP: /* SSE */
3024 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3025 IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq");
3026 else
3027 IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed");
3028 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3029 {
3030 /* XMM, greg*/
3031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3032 IEM_MC_BEGIN(0, 1);
3033 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3034 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3035 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3036 {
3037 IEM_MC_LOCAL(uint64_t, u64Tmp);
3038 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3039 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3040 }
3041 else
3042 {
3043 IEM_MC_LOCAL(uint32_t, u32Tmp);
3044 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3045 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3046 }
3047 IEM_MC_ADVANCE_RIP();
3048 IEM_MC_END();
3049 }
3050 else
3051 {
3052 /* XMM, [mem] */
3053 IEM_MC_BEGIN(0, 2);
3054 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3055 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3058 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3059 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3060 {
3061 IEM_MC_LOCAL(uint64_t, u64Tmp);
3062 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3063 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3064 }
3065 else
3066 {
3067 IEM_MC_LOCAL(uint32_t, u32Tmp);
3068 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3069 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3070 }
3071 IEM_MC_ADVANCE_RIP();
3072 IEM_MC_END();
3073 }
3074 return VINF_SUCCESS;
3075
3076 case 0: /* MMX */
3077 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3078 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
3079 else
3080 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
3081 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3082 {
3083 /* MMX, greg */
3084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3085 IEM_MC_BEGIN(0, 1);
3086 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3087 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3088 IEM_MC_LOCAL(uint64_t, u64Tmp);
3089 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3090 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3091 else
3092 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3093 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3094 IEM_MC_ADVANCE_RIP();
3095 IEM_MC_END();
3096 }
3097 else
3098 {
3099 /* MMX, [mem] */
3100 IEM_MC_BEGIN(0, 2);
3101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3102 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3105 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3106 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3107 {
3108 IEM_MC_LOCAL(uint64_t, u64Tmp);
3109 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3110 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3111 }
3112 else
3113 {
3114 IEM_MC_LOCAL(uint32_t, u32Tmp);
3115 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3116 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3117 }
3118 IEM_MC_ADVANCE_RIP();
3119 IEM_MC_END();
3120 }
3121 return VINF_SUCCESS;
3122
3123 default:
3124 return IEMOP_RAISE_INVALID_OPCODE();
3125 }
3126}
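
/* Examples (illustrative): 0f 6e c0 is 'movd mm0,eax'; with an operand size
   prefix, 66 0f 6e c0 becomes 'movd xmm0,eax' (zero-extending into the full
   128 bits), and 66 48 0f 6e c0 with REX.W is 'movq xmm0,rax', matching the
   prefix dispatch above. */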
3127
3128
3129/** Opcode 0x0f 0x6f. */
3130FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
3131{
3132 bool fAligned = false;
3133 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3134 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3135 {
3136 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
3137 fAligned = true;
3138 /* fall thru */
3139 case IEM_OP_PRF_REPZ: /* SSE unaligned */
3140 if (fAligned)
3141 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3142 else
3143 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3144 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3145 {
3146 /*
3147 * Register, register.
3148 */
3149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3150 IEM_MC_BEGIN(0, 0);
3151 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3152 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3153 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3154 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3155 IEM_MC_ADVANCE_RIP();
3156 IEM_MC_END();
3157 }
3158 else
3159 {
3160 /*
3161 * Register, memory.
3162 */
3163 IEM_MC_BEGIN(0, 2);
3164 IEM_MC_LOCAL(uint128_t, u128Tmp);
3165 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3166
3167 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3169 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3170 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3171 if (fAligned)
3172 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3173 else
3174 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3175 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3176
3177 IEM_MC_ADVANCE_RIP();
3178 IEM_MC_END();
3179 }
3180 return VINF_SUCCESS;
3181
3182 case 0: /* MMX */
3183 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3184 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3185 {
3186 /*
3187 * Register, register.
3188 */
3189 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3190 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3192 IEM_MC_BEGIN(0, 1);
3193 IEM_MC_LOCAL(uint64_t, u64Tmp);
3194 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3195 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3196 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3197 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3198 IEM_MC_ADVANCE_RIP();
3199 IEM_MC_END();
3200 }
3201 else
3202 {
3203 /*
3204 * Register, memory.
3205 */
3206 IEM_MC_BEGIN(0, 2);
3207 IEM_MC_LOCAL(uint64_t, u64Tmp);
3208 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3209
3210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3212 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3213 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3214 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3215 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3216
3217 IEM_MC_ADVANCE_RIP();
3218 IEM_MC_END();
3219 }
3220 return VINF_SUCCESS;
3221
3222 default:
3223 return IEMOP_RAISE_INVALID_OPCODE();
3224 }
3225}
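
/* Example (illustrative): f3 0f 6f 06 decodes as 'movdqu xmm0,[esi]' and
   uses the unaligned fetch, while 66 0f 6f 06 ('movdqa xmm0,[esi]') takes
   the aligned path and raises #GP(0) if ESI is not 16-byte aligned. */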
3226
3227
3228/** Opcode 0x0f 0x70. The immediate here is evil! */
3229FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
3230{
3231 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3232 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3233 {
3234 case IEM_OP_PRF_SIZE_OP: /* SSE */
3235 case IEM_OP_PRF_REPNZ: /* SSE */
3236 case IEM_OP_PRF_REPZ: /* SSE */
3237 {
3238 PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
3239 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3240 {
3241 case IEM_OP_PRF_SIZE_OP:
3242 IEMOP_MNEMONIC(pshufd_Vdq_Wdq, "pshufd Vdq,Wdq,Ib");
3243 pfnAImpl = iemAImpl_pshufd;
3244 break;
3245 case IEM_OP_PRF_REPNZ:
3246 IEMOP_MNEMONIC(pshuflw_Vdq_Wdq, "pshuflw Vdq,Wdq,Ib");
3247 pfnAImpl = iemAImpl_pshuflw;
3248 break;
3249 case IEM_OP_PRF_REPZ:
3250 IEMOP_MNEMONIC(pshufhw_Vdq_Wdq, "pshufhw Vdq,Wdq,Ib");
3251 pfnAImpl = iemAImpl_pshufhw;
3252 break;
3253 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3254 }
3255 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3256 {
3257 /*
3258 * Register, register.
3259 */
3260 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3262
3263 IEM_MC_BEGIN(3, 0);
3264 IEM_MC_ARG(uint128_t *, pDst, 0);
3265 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3266 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3267 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3268 IEM_MC_PREPARE_SSE_USAGE();
3269 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3270 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3271 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
3272 IEM_MC_ADVANCE_RIP();
3273 IEM_MC_END();
3274 }
3275 else
3276 {
3277 /*
3278 * Register, memory.
3279 */
3280 IEM_MC_BEGIN(3, 2);
3281 IEM_MC_ARG(uint128_t *, pDst, 0);
3282 IEM_MC_LOCAL(uint128_t, uSrc);
3283 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3285
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3287 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3288 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3290 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3291
3292 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3293 IEM_MC_PREPARE_SSE_USAGE();
3294 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3295 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
3296
3297 IEM_MC_ADVANCE_RIP();
3298 IEM_MC_END();
3299 }
3300 return VINF_SUCCESS;
3301 }
3302
3303 case 0: /* MMX Extension */
3304 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3305 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3306 {
3307 /*
3308 * Register, register.
3309 */
3310 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3312
3313 IEM_MC_BEGIN(3, 0);
3314 IEM_MC_ARG(uint64_t *, pDst, 0);
3315 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3316 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3317 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3318 IEM_MC_PREPARE_FPU_USAGE();
3319 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3320 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3321 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3322 IEM_MC_ADVANCE_RIP();
3323 IEM_MC_END();
3324 }
3325 else
3326 {
3327 /*
3328 * Register, memory.
3329 */
3330 IEM_MC_BEGIN(3, 2);
3331 IEM_MC_ARG(uint64_t *, pDst, 0);
3332 IEM_MC_LOCAL(uint64_t, uSrc);
3333 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3334 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3335
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3337 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3338 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3340 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3341
3342 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3343 IEM_MC_PREPARE_FPU_USAGE();
3344 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3345 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3346
3347 IEM_MC_ADVANCE_RIP();
3348 IEM_MC_END();
3349 }
3350 return VINF_SUCCESS;
3351
3352 default:
3353 return IEMOP_RAISE_INVALID_OPCODE();
3354 }
3355}
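
#if 0 /* Illustrative sketch, not built: the dword selection performed by the
         pshufd worker referenced above; names are invented for the example.
         bImm=0x1b, for instance, reverses the four dwords. */
static void pshufdRef(uint32_t au32Dst[4], uint32_t const au32Src[4], uint8_t bImm)
{
    uint32_t au32Res[4];
    for (unsigned i = 0; i < 4; i++)
        au32Res[i] = au32Src[(bImm >> (i * 2)) & 3]; /* 2 imm bits per result dword */
    for (unsigned i = 0; i < 4; i++) /* copy via temp in case dst aliases src */
        au32Dst[i] = au32Res[i];
}
#endif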
3356
3357
3358/** Opcode 0x0f 0x71 11/2. */
3359FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3360
3361/** Opcode 0x66 0x0f 0x71 11/2. */
3362FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
3363
3364/** Opcode 0x0f 0x71 11/4. */
3365FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3366
3367/** Opcode 0x66 0x0f 0x71 11/4. */
3368FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
3369
3370/** Opcode 0x0f 0x71 11/6. */
3371FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3372
3373/** Opcode 0x66 0x0f 0x71 11/6. */
3374FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
3375
3376
3377/** Opcode 0x0f 0x71. */
3378FNIEMOP_DEF(iemOp_Grp12)
3379{
3380 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3381 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3382 return IEMOP_RAISE_INVALID_OPCODE();
3383 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3384 {
3385 case 0: case 1: case 3: case 5: case 7:
3386 return IEMOP_RAISE_INVALID_OPCODE();
3387 case 2:
3388 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3389 {
3390 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
3391 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3392 default: return IEMOP_RAISE_INVALID_OPCODE();
3393 }
3394 case 4:
3395 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3396 {
3397 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3398 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3399 default: return IEMOP_RAISE_INVALID_OPCODE();
3400 }
3401 case 6:
3402 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3403 {
3404 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3405 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3406 default: return IEMOP_RAISE_INVALID_OPCODE();
3407 }
3408 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3409 }
3410}
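
/* Example (illustrative): 66 0f 71 d0 04 decodes as 'psrlw xmm0,4'
   (mod=3, reg=2 selects the shift-right case, imm8=4), while the same
   bytes without the 66 prefix give the MMX form 'psrlw mm0,4'; memory
   operands are rejected up front as coded above. */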
3411
3412
3413/** Opcode 0x0f 0x72 11/2. */
3414FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3415
3416/** Opcode 0x66 0x0f 0x72 11/2. */
3417FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
3418
3419/** Opcode 0x0f 0x72 11/4. */
3420FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3421
3422/** Opcode 0x66 0x0f 0x72 11/4. */
3423FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
3424
3425/** Opcode 0x0f 0x72 11/6. */
3426FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3427
3428/** Opcode 0x66 0x0f 0x72 11/6. */
3429FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3430
3431
3432/** Opcode 0x0f 0x72. */
3433FNIEMOP_DEF(iemOp_Grp13)
3434{
3435 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3436 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3437 return IEMOP_RAISE_INVALID_OPCODE();
3438 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3439 {
3440 case 0: case 1: case 3: case 5: case 7:
3441 return IEMOP_RAISE_INVALID_OPCODE();
3442 case 2:
3443 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3444 {
3445 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3446 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3447 default: return IEMOP_RAISE_INVALID_OPCODE();
3448 }
3449 case 4:
3450 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3451 {
3452 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3453 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3454 default: return IEMOP_RAISE_INVALID_OPCODE();
3455 }
3456 case 6:
3457 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3458 {
3459 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3460 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3461 default: return IEMOP_RAISE_INVALID_OPCODE();
3462 }
3463 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3464 }
3465}
3466
3467
3468/** Opcode 0x0f 0x73 11/2. */
3469FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3470
3471/** Opcode 0x66 0x0f 0x73 11/2. */
3472FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
3473
3474/** Opcode 0x66 0x0f 0x73 11/3. */
3475FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3476
3477/** Opcode 0x0f 0x73 11/6. */
3478FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3479
3480/** Opcode 0x66 0x0f 0x73 11/6. */
3481FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3482
3483/** Opcode 0x66 0x0f 0x73 11/7. */
3484FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3485
3486
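/*
 * Note that unlike the word/dword/qword shifts above, the byte-wise double
 * quadword shifts psrldq (reg=3) and pslldq (reg=7) only exist as 128-bit
 * SSE2 instructions, which is why group 14 decodes them solely behind the
 * 0x66 operand size prefix.
 */
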
/** Opcode 0x0f 0x73. */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 3:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxx    mm1, mm2/mem64
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 and MMX cpuid checks.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
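                /* MMX registers alias the x87 FPU register file, which is why
                   the MMX paths here actualize and prepare the FPU state
                   rather than the SSE state. */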
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}


/** Opcode 0x0f 0x74. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}


/** Opcode 0x0f 0x75. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}


/** Opcode 0x0f 0x76. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}


/** Opcode 0x0f 0x77 - emms (and with a VEX prefix: vzeroupper / vzeroall) */
FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
/* Opcode 0x66 0x0f 0x77 - invalid */
/* Opcode 0xf3 0x0f 0x77 - invalid */
/* Opcode 0xf2 0x0f 0x77 - invalid */

/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17);
/* Opcode 0xf3 0x0f 0x78 - invalid */
/* Opcode 0xf2 0x0f 0x78 - invalid */

/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
/* Opcode 0x66 0x0f 0x79 - invalid */
/* Opcode 0xf3 0x0f 0x79 - invalid */
/* Opcode 0xf2 0x0f 0x79 - invalid */

/* Opcode 0x0f 0x7a - invalid */
/* Opcode 0x66 0x0f 0x7a - invalid */
/* Opcode 0xf3 0x0f 0x7a - invalid */
/* Opcode 0xf2 0x0f 0x7a - invalid */

/* Opcode 0x0f 0x7b - invalid */
/* Opcode 0x66 0x0f 0x7b - invalid */
/* Opcode 0xf3 0x0f 0x7b - invalid */
/* Opcode 0xf2 0x0f 0x7b - invalid */

/* Opcode 0x0f 0x7c - invalid */
/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x7c - invalid */
/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);

/* Opcode 0x0f 0x7d - invalid */
/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x7d - invalid */
/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);


/** Opcode 0x0f 0x7e. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
            else
                IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
            else
                IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}


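/*
 * For 0x0f 0x7f the mandatory prefix selects between three stores from the
 * SIMD register: no prefix gives the MMX 'movq Qq,Pq', 0x66 the aligned
 * 'movdqa Wdq,Vdq' and 0xf3 the unaligned 'movdqu Wdq,Vdq'.  The two SSE2
 * cases below share one code path via the fAligned flag.
 */
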
/** Opcode 0x0f 0x7f. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                                      ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}


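/*
 * The two-byte Jcc instructions (0x0f 0x80 thru 0x0f 0x8f) below all follow
 * the same template: they default to the 64-bit operand size in long mode
 * (IEMOP_HLP_DEFAULT_64BIT_OP_SIZE), so outside of a 16-bit operand size they
 * fetch a sign-extended 32-bit displacement which is applied to RIP when the
 * condition holds.
 */
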
/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8b. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
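    /* Signed less-than: the branch is taken when SF != OF, i.e. when the sign
       of the flag-setting result differs from what it would have been had no
       signed overflow occurred. */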
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


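/*
 * The SETcc instructions (0x0f 0x90 thru 0x0f 0x9f) below mirror the Jcc
 * conditions above, but instead of branching they store a single byte, 1 when
 * the condition holds and 0 otherwise, to the register or memory operand.
 */
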
/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x91. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x92. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x93. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x94. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x95. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x96. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x97. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x98. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x99. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9a. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common 'push segment-register' helper.
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
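    /* In 64-bit mode only the FS and GS pushes are valid; the ES/CS/SS/DS
       encodings (X86_SREG_ES thru X86_SREG_DS sort below X86_SREG_FS) raise
       #UD there, which is what the check below enforces. */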
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
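            /* The dedicated SREG push variant is used here since real CPUs may
               write only the low 16 bits of a 32-bit segment register push,
               leaving the upper half of the stack slot untouched. */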
5197 IEM_MC_PUSH_U32_SREG(u32Value);
5198 IEM_MC_ADVANCE_RIP();
5199 IEM_MC_END();
5200 break;
5201
5202 case IEMMODE_64BIT:
5203 IEM_MC_BEGIN(0, 1);
5204 IEM_MC_LOCAL(uint64_t, u64Value);
5205 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5206 IEM_MC_PUSH_U64(u64Value);
5207 IEM_MC_ADVANCE_RIP();
5208 IEM_MC_END();
5209 break;
5210 }
5211
5212 return VINF_SUCCESS;
5213}
5214
5215
5216/** Opcode 0x0f 0xa0. */
5217FNIEMOP_DEF(iemOp_push_fs)
5218{
5219 IEMOP_MNEMONIC(push_fs, "push fs");
5220 IEMOP_HLP_MIN_386();
5221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5222 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5223}
5224
5225
5226/** Opcode 0x0f 0xa1. */
5227FNIEMOP_DEF(iemOp_pop_fs)
5228{
5229 IEMOP_MNEMONIC(pop_fs, "pop fs");
5230 IEMOP_HLP_MIN_386();
5231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5232 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5233}
5234
5235
5236/** Opcode 0x0f 0xa2. */
5237FNIEMOP_DEF(iemOp_cpuid)
5238{
5239 IEMOP_MNEMONIC(cpuid, "cpuid");
5240 IEMOP_HLP_MIN_486(); /* not all 486es. */
5241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5242 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5243}
5244
5245
5246/**
5247 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5248 * iemOp_bts_Ev_Gv.
5249 */
5250FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5251{
5252 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5253 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5254
5255 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5256 {
5257 /* register destination. */
5258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5259 switch (pVCpu->iem.s.enmEffOpSize)
5260 {
5261 case IEMMODE_16BIT:
5262 IEM_MC_BEGIN(3, 0);
5263 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5264 IEM_MC_ARG(uint16_t, u16Src, 1);
5265 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5266
5267 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5268 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5269 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5270 IEM_MC_REF_EFLAGS(pEFlags);
5271 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5272
5273 IEM_MC_ADVANCE_RIP();
5274 IEM_MC_END();
5275 return VINF_SUCCESS;
5276
5277 case IEMMODE_32BIT:
5278 IEM_MC_BEGIN(3, 0);
5279 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5280 IEM_MC_ARG(uint32_t, u32Src, 1);
5281 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5282
5283 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5284 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5285 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5286 IEM_MC_REF_EFLAGS(pEFlags);
5287 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5288
5289 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5290 IEM_MC_ADVANCE_RIP();
5291 IEM_MC_END();
5292 return VINF_SUCCESS;
5293
5294 case IEMMODE_64BIT:
5295 IEM_MC_BEGIN(3, 0);
5296 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5297 IEM_MC_ARG(uint64_t, u64Src, 1);
5298 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5299
5300 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5301 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5302 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5303 IEM_MC_REF_EFLAGS(pEFlags);
5304 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5305
5306 IEM_MC_ADVANCE_RIP();
5307 IEM_MC_END();
5308 return VINF_SUCCESS;
5309
5310 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5311 }
5312 }
5313 else
5314 {
5315 /* memory destination. */
5316
5317 uint32_t fAccess;
5318 if (pImpl->pfnLockedU16)
5319 fAccess = IEM_ACCESS_DATA_RW;
5320 else /* BT */
5321 fAccess = IEM_ACCESS_DATA_R;
5322
5323 /** @todo test negative bit offsets! */
5324 switch (pVCpu->iem.s.enmEffOpSize)
5325 {
5326 case IEMMODE_16BIT:
5327 IEM_MC_BEGIN(3, 2);
5328 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5329 IEM_MC_ARG(uint16_t, u16Src, 1);
5330 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5332 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5333
5334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5335 if (pImpl->pfnLockedU16)
5336 IEMOP_HLP_DONE_DECODING();
5337 else
5338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5339 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
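                /* Split the signed bit offset: the arithmetic shift right by
                   4 gives the word index, the shift left by 1 converts it to
                   a byte offset to add to the effective address; the low
                   four bits select the bit within that word. */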
5340 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5341 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5342 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5343 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5344 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5345 IEM_MC_FETCH_EFLAGS(EFlags);
5346
5347 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5348 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5349 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5350 else
5351 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5352 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5353
5354 IEM_MC_COMMIT_EFLAGS(EFlags);
5355 IEM_MC_ADVANCE_RIP();
5356 IEM_MC_END();
5357 return VINF_SUCCESS;
5358
5359 case IEMMODE_32BIT:
5360 IEM_MC_BEGIN(3, 2);
5361 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5362 IEM_MC_ARG(uint32_t, u32Src, 1);
5363 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5364 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5365 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5366
5367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5368 if (pImpl->pfnLockedU16)
5369 IEMOP_HLP_DONE_DECODING();
5370 else
5371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5372 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
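                /* Same split at dword granularity (>> 5, * 4). */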
5373 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5374 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5375 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5376 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5377 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5378 IEM_MC_FETCH_EFLAGS(EFlags);
5379
5380 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5381 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5382 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5383 else
5384 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5385 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5386
5387 IEM_MC_COMMIT_EFLAGS(EFlags);
5388 IEM_MC_ADVANCE_RIP();
5389 IEM_MC_END();
5390 return VINF_SUCCESS;
5391
5392 case IEMMODE_64BIT:
5393 IEM_MC_BEGIN(3, 2);
5394 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5395 IEM_MC_ARG(uint64_t, u64Src, 1);
5396 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5398 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5399
5400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5401 if (pImpl->pfnLockedU16)
5402 IEMOP_HLP_DONE_DECODING();
5403 else
5404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5405 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
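                /* Same split at qword granularity (>> 6, * 8). */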
5406 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5407 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5408 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5409 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5410 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5411 IEM_MC_FETCH_EFLAGS(EFlags);
5412
5413 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5414 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5415 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5416 else
5417 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5418 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5419
5420 IEM_MC_COMMIT_EFLAGS(EFlags);
5421 IEM_MC_ADVANCE_RIP();
5422 IEM_MC_END();
5423 return VINF_SUCCESS;
5424
5425 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5426 }
5427 }
5428}
5429
5430
5431/** Opcode 0x0f 0xa3. */
5432FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5433{
5434 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5435 IEMOP_HLP_MIN_386();
5436 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5437}
5438
5439
5440/**
5441 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5442 */
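/*
 * Conceptually, for an in-range shift count c:
 *
 *     shld: dst = (dst << c) | (src >> (width - c));
 *     shrd: dst = (dst >> c) | (src << (width - c));
 *
 * The count is masked to 5 bits (6 for 64-bit operands) and counts beyond
 * the operand width yield undefined results; that masking and the edge
 * cases are assumed to be handled by the pfnNormal* assembly helpers.
 */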
5443FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5444{
5445 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5446 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5447
5448 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5449 {
5450 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5452
5453 switch (pVCpu->iem.s.enmEffOpSize)
5454 {
5455 case IEMMODE_16BIT:
5456 IEM_MC_BEGIN(4, 0);
5457 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5458 IEM_MC_ARG(uint16_t, u16Src, 1);
5459 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5460 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5461
5462 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5463 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5464 IEM_MC_REF_EFLAGS(pEFlags);
5465 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5466
5467 IEM_MC_ADVANCE_RIP();
5468 IEM_MC_END();
5469 return VINF_SUCCESS;
5470
5471 case IEMMODE_32BIT:
5472 IEM_MC_BEGIN(4, 0);
5473 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5474 IEM_MC_ARG(uint32_t, u32Src, 1);
5475 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5476 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5477
5478 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5479 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5480 IEM_MC_REF_EFLAGS(pEFlags);
5481 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5482
5483 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5484 IEM_MC_ADVANCE_RIP();
5485 IEM_MC_END();
5486 return VINF_SUCCESS;
5487
5488 case IEMMODE_64BIT:
5489 IEM_MC_BEGIN(4, 0);
5490 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5491 IEM_MC_ARG(uint64_t, u64Src, 1);
5492 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5493 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5494
5495 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5496 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5497 IEM_MC_REF_EFLAGS(pEFlags);
5498 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5499
5500 IEM_MC_ADVANCE_RIP();
5501 IEM_MC_END();
5502 return VINF_SUCCESS;
5503
5504 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5505 }
5506 }
5507 else
5508 {
5509 switch (pVCpu->iem.s.enmEffOpSize)
5510 {
5511 case IEMMODE_16BIT:
5512 IEM_MC_BEGIN(4, 2);
5513 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5514 IEM_MC_ARG(uint16_t, u16Src, 1);
5515 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5516 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5517 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5518
5519 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5520 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5521 IEM_MC_ASSIGN(cShiftArg, cShift);
5522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5523 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5524 IEM_MC_FETCH_EFLAGS(EFlags);
5525 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5526 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5527
5528 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5529 IEM_MC_COMMIT_EFLAGS(EFlags);
5530 IEM_MC_ADVANCE_RIP();
5531 IEM_MC_END();
5532 return VINF_SUCCESS;
5533
5534 case IEMMODE_32BIT:
5535 IEM_MC_BEGIN(4, 2);
5536 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5537 IEM_MC_ARG(uint32_t, u32Src, 1);
5538 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5539 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5541
5542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5543 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5544 IEM_MC_ASSIGN(cShiftArg, cShift);
5545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5546 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5547 IEM_MC_FETCH_EFLAGS(EFlags);
5548 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5549 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5550
5551 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5552 IEM_MC_COMMIT_EFLAGS(EFlags);
5553 IEM_MC_ADVANCE_RIP();
5554 IEM_MC_END();
5555 return VINF_SUCCESS;
5556
5557 case IEMMODE_64BIT:
5558 IEM_MC_BEGIN(4, 2);
5559 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5560 IEM_MC_ARG(uint64_t, u64Src, 1);
5561 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5562 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5564
5565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5566 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5567 IEM_MC_ASSIGN(cShiftArg, cShift);
5568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5569 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5570 IEM_MC_FETCH_EFLAGS(EFlags);
5571 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5572 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5573
5574 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5575 IEM_MC_COMMIT_EFLAGS(EFlags);
5576 IEM_MC_ADVANCE_RIP();
5577 IEM_MC_END();
5578 return VINF_SUCCESS;
5579
5580 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5581 }
5582 }
5583}
5584
5585
5586/**
5587 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5588 */
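/*
 * Same as the Ib variant above, except the shift count is fetched from CL
 * at execution time instead of being decoded as an immediate byte.
 */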
5589FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5590{
5591 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5592 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5593
5594 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5595 {
5596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5597
5598 switch (pVCpu->iem.s.enmEffOpSize)
5599 {
5600 case IEMMODE_16BIT:
5601 IEM_MC_BEGIN(4, 0);
5602 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5603 IEM_MC_ARG(uint16_t, u16Src, 1);
5604 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5605 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5606
5607 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5608 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5609 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5610 IEM_MC_REF_EFLAGS(pEFlags);
5611 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5612
5613 IEM_MC_ADVANCE_RIP();
5614 IEM_MC_END();
5615 return VINF_SUCCESS;
5616
5617 case IEMMODE_32BIT:
5618 IEM_MC_BEGIN(4, 0);
5619 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5620 IEM_MC_ARG(uint32_t, u32Src, 1);
5621 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5622 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5623
5624 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5625 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5626 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5627 IEM_MC_REF_EFLAGS(pEFlags);
5628 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5629
5630 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5631 IEM_MC_ADVANCE_RIP();
5632 IEM_MC_END();
5633 return VINF_SUCCESS;
5634
5635 case IEMMODE_64BIT:
5636 IEM_MC_BEGIN(4, 0);
5637 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5638 IEM_MC_ARG(uint64_t, u64Src, 1);
5639 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5640 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5641
5642 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5643 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5644 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5645 IEM_MC_REF_EFLAGS(pEFlags);
5646 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5647
5648 IEM_MC_ADVANCE_RIP();
5649 IEM_MC_END();
5650 return VINF_SUCCESS;
5651
5652 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5653 }
5654 }
5655 else
5656 {
5657 switch (pVCpu->iem.s.enmEffOpSize)
5658 {
5659 case IEMMODE_16BIT:
5660 IEM_MC_BEGIN(4, 2);
5661 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5662 IEM_MC_ARG(uint16_t, u16Src, 1);
5663 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5664 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5665 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5666
5667 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5669 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5670 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5671 IEM_MC_FETCH_EFLAGS(EFlags);
5672 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5673 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5674
5675 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5676 IEM_MC_COMMIT_EFLAGS(EFlags);
5677 IEM_MC_ADVANCE_RIP();
5678 IEM_MC_END();
5679 return VINF_SUCCESS;
5680
5681 case IEMMODE_32BIT:
5682 IEM_MC_BEGIN(4, 2);
5683 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5684 IEM_MC_ARG(uint32_t, u32Src, 1);
5685 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5686 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5687 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5688
5689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5691 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5692 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5693 IEM_MC_FETCH_EFLAGS(EFlags);
5694 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5695 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5696
5697 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5698 IEM_MC_COMMIT_EFLAGS(EFlags);
5699 IEM_MC_ADVANCE_RIP();
5700 IEM_MC_END();
5701 return VINF_SUCCESS;
5702
5703 case IEMMODE_64BIT:
5704 IEM_MC_BEGIN(4, 2);
5705 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5706 IEM_MC_ARG(uint64_t, u64Src, 1);
5707 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5708 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5709 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5710
5711 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5713 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5714 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5715 IEM_MC_FETCH_EFLAGS(EFlags);
5716 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5717 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5718
5719 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5720 IEM_MC_COMMIT_EFLAGS(EFlags);
5721 IEM_MC_ADVANCE_RIP();
5722 IEM_MC_END();
5723 return VINF_SUCCESS;
5724
5725 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5726 }
5727 }
5728}
5729
5730
5731
5732/** Opcode 0x0f 0xa4. */
5733FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5734{
5735 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5736 IEMOP_HLP_MIN_386();
5737 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5738}
5739
5740
5741/** Opcode 0x0f 0xa5. */
5742FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5743{
5744 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5745 IEMOP_HLP_MIN_386();
5746 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5747}
5748
5749
5750/** Opcode 0x0f 0xa8. */
5751FNIEMOP_DEF(iemOp_push_gs)
5752{
5753 IEMOP_MNEMONIC(push_gs, "push gs");
5754 IEMOP_HLP_MIN_386();
5755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5756 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5757}
5758
5759
5760/** Opcode 0x0f 0xa9. */
5761FNIEMOP_DEF(iemOp_pop_gs)
5762{
5763 IEMOP_MNEMONIC(pop_gs, "pop gs");
5764 IEMOP_HLP_MIN_386();
5765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5766 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5767}
5768
5769
5770/** Opcode 0x0f 0xaa. */
5771FNIEMOP_STUB(iemOp_rsm);
5772//IEMOP_HLP_MIN_386();
5773
5774
5775/** Opcode 0x0f 0xab. */
5776FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5777{
5778 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5779 IEMOP_HLP_MIN_386();
5780 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5781}
5782
5783
5784/** Opcode 0x0f 0xac. */
5785FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5786{
5787 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5788 IEMOP_HLP_MIN_386();
5789 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5790}
5791
5792
5793/** Opcode 0x0f 0xad. */
5794FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5795{
5796 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5797 IEMOP_HLP_MIN_386();
5798 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5799}
5800
5801
5802/** Opcode 0x0f 0xae mem/0. */
5803FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5804{
5805 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5806 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5807 return IEMOP_RAISE_INVALID_OPCODE();
5808
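    /* FXSAVE stores the 512-byte x87/MMX/SSE state image (control/status
       words, MXCSR, ST/MM and XMM registers) to m512, which must be 16-byte
       aligned; the layout and alignment check are assumed to live in
       iemCImpl_fxsave (and its fxrstor counterpart below). */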
5809 IEM_MC_BEGIN(3, 1);
5810 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5811 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5812 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5813 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5815 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5816 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5817 IEM_MC_END();
5818 return VINF_SUCCESS;
5819}
5820
5821
5822/** Opcode 0x0f 0xae mem/1. */
5823FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5824{
5825 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5826 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5827 return IEMOP_RAISE_INVALID_OPCODE();
5828
5829 IEM_MC_BEGIN(3, 1);
5830 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5831 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5832 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5835 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5836 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5837 IEM_MC_END();
5838 return VINF_SUCCESS;
5839}
5840
5841
5842/** Opcode 0x0f 0xae mem/2. */
5843FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5844
5845/** Opcode 0x0f 0xae mem/3. */
5846FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5847
5848/** Opcode 0x0f 0xae mem/4. */
5849FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5850
5851/** Opcode 0x0f 0xae mem/5. */
5852FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5853
5854/** Opcode 0x0f 0xae mem/6. */
5855FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5856
5857/** Opcode 0x0f 0xae mem/7. */
5858FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5859
5860
5861/** Opcode 0x0f 0xae 11b/5. */
5862FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5863{
5864 RT_NOREF_PV(bRm);
5865 IEMOP_MNEMONIC(lfence, "lfence");
5866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5867 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5868 return IEMOP_RAISE_INVALID_OPCODE();
5869
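    /* If the host CPU has SSE2, emit a real LFENCE; otherwise fall back to
       iemAImpl_alt_mem_fence, presumably a locked memory operation serving
       as a full barrier on pre-SSE2 hosts.  The mfence and sfence forms
       below use the same fallback. */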
5870 IEM_MC_BEGIN(0, 0);
5871 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5872 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5873 else
5874 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5875 IEM_MC_ADVANCE_RIP();
5876 IEM_MC_END();
5877 return VINF_SUCCESS;
5878}
5879
5880
5881/** Opcode 0x0f 0xae 11b/6. */
5882FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5883{
5884 RT_NOREF_PV(bRm);
5885 IEMOP_MNEMONIC(mfence, "mfence");
5886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5887 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5888 return IEMOP_RAISE_INVALID_OPCODE();
5889
5890 IEM_MC_BEGIN(0, 0);
5891 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5892 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5893 else
5894 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5895 IEM_MC_ADVANCE_RIP();
5896 IEM_MC_END();
5897 return VINF_SUCCESS;
5898}
5899
5900
5901/** Opcode 0x0f 0xae 11b/7. */
5902FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5903{
5904 RT_NOREF_PV(bRm);
5905 IEMOP_MNEMONIC(sfence, "sfence");
5906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5907 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5908 return IEMOP_RAISE_INVALID_OPCODE();
5909
5910 IEM_MC_BEGIN(0, 0);
5911 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5912 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5913 else
5914 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5915 IEM_MC_ADVANCE_RIP();
5916 IEM_MC_END();
5917 return VINF_SUCCESS;
5918}
5919
5920
5921/** Opcode 0xf3 0x0f 0xae 11b/0. */
5922FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5923
5924/** Opcode 0xf3 0x0f 0xae 11b/1. */
5925FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5926
5927/** Opcode 0xf3 0x0f 0xae 11b/2. */
5928FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5929
5930/** Opcode 0xf3 0x0f 0xae 11b/3. */
5931FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5932
5933
5934/** Opcode 0x0f 0xae. */
5935FNIEMOP_DEF(iemOp_Grp15)
5936{
5937 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
5938 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
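    /* Grp15 dispatches on the mod field first (memory forms), then on the
       mandatory prefix for the register forms: no prefix selects the
       fences, F3 the fs/gs base accessors. */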
5939 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5940 {
5941 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5942 {
5943 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5944 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5945 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5946 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5947 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5948 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5949 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt, bRm);
5950 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5951 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5952 }
5953 }
5954 else
5955 {
5956 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5957 {
5958 case 0:
5959 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5960 {
5961 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5962 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5963 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5964 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5965 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5966 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5967 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5968 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5969 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5970 }
5971 break;
5972
5973 case IEM_OP_PRF_REPZ:
5974 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5975 {
5976 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5977 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5978 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5979 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5980 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5981 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5982 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5983 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5984 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5985 }
5986 break;
5987
5988 default:
5989 return IEMOP_RAISE_INVALID_OPCODE();
5990 }
5991 }
5992}
5993
5994
5995/** Opcode 0x0f 0xaf. */
5996FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5997{
5998 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5999 IEMOP_HLP_MIN_386();
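    /* Two-operand IMUL sets CF and OF when the signed result does not fit
       the destination width; SF, ZF, AF and PF are undefined, hence the
       verification mask below. */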
6000 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6001 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6002}
6003
6004
6005/** Opcode 0x0f 0xb0. */
6006FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6007{
6008 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6009 IEMOP_HLP_MIN_486();
6010 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6011
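    /* CMPXCHG: if AL == dst, ZF is set and dst = src; otherwise ZF is clear
       and AL = dst.  The memory path below works on a local copy of AL and
       stores it back unconditionally; the helper is assumed to leave the
       copy untouched on a successful compare, so that store is then a
       no-op. */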
6012 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6013 {
6014 IEMOP_HLP_DONE_DECODING();
6015 IEM_MC_BEGIN(4, 0);
6016 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6017 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6018 IEM_MC_ARG(uint8_t, u8Src, 2);
6019 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6020
6021 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6022 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6023 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6024 IEM_MC_REF_EFLAGS(pEFlags);
6025 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6026 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6027 else
6028 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6029
6030 IEM_MC_ADVANCE_RIP();
6031 IEM_MC_END();
6032 }
6033 else
6034 {
6035 IEM_MC_BEGIN(4, 3);
6036 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6037 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6038 IEM_MC_ARG(uint8_t, u8Src, 2);
6039 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6041 IEM_MC_LOCAL(uint8_t, u8Al);
6042
6043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6044 IEMOP_HLP_DONE_DECODING();
6045 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6046 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6047 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6048 IEM_MC_FETCH_EFLAGS(EFlags);
6049 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6050 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6051 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6052 else
6053 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6054
6055 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6056 IEM_MC_COMMIT_EFLAGS(EFlags);
6057 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6058 IEM_MC_ADVANCE_RIP();
6059 IEM_MC_END();
6060 }
6061 return VINF_SUCCESS;
6062}
6063
6064/** Opcode 0x0f 0xb1. */
6065FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6066{
6067 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6068 IEMOP_HLP_MIN_486();
6069 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6070
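    /* Note: the 64-bit cases below pass the source by reference on 32-bit
       hosts (RT_ARCH_X86), presumably because a 64-bit value is awkward to
       pass by value to the 32-bit assembly helpers. */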
6071 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6072 {
6073 IEMOP_HLP_DONE_DECODING();
6074 switch (pVCpu->iem.s.enmEffOpSize)
6075 {
6076 case IEMMODE_16BIT:
6077 IEM_MC_BEGIN(4, 0);
6078 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6079 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6080 IEM_MC_ARG(uint16_t, u16Src, 2);
6081 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6082
6083 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6084 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6085 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6086 IEM_MC_REF_EFLAGS(pEFlags);
6087 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6088 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6089 else
6090 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6091
6092 IEM_MC_ADVANCE_RIP();
6093 IEM_MC_END();
6094 return VINF_SUCCESS;
6095
6096 case IEMMODE_32BIT:
6097 IEM_MC_BEGIN(4, 0);
6098 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6099 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6100 IEM_MC_ARG(uint32_t, u32Src, 2);
6101 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6102
6103 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6104 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6105 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6106 IEM_MC_REF_EFLAGS(pEFlags);
6107 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6108 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6109 else
6110 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6111
6112 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6113 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6114 IEM_MC_ADVANCE_RIP();
6115 IEM_MC_END();
6116 return VINF_SUCCESS;
6117
6118 case IEMMODE_64BIT:
6119 IEM_MC_BEGIN(4, 0);
6120 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6121 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6122#ifdef RT_ARCH_X86
6123 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6124#else
6125 IEM_MC_ARG(uint64_t, u64Src, 2);
6126#endif
6127 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6128
6129 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6130 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6131 IEM_MC_REF_EFLAGS(pEFlags);
6132#ifdef RT_ARCH_X86
6133 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6134 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6135 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6136 else
6137 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6138#else
6139 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6140 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6141 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6142 else
6143 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6144#endif
6145
6146 IEM_MC_ADVANCE_RIP();
6147 IEM_MC_END();
6148 return VINF_SUCCESS;
6149
6150 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6151 }
6152 }
6153 else
6154 {
6155 switch (pVCpu->iem.s.enmEffOpSize)
6156 {
6157 case IEMMODE_16BIT:
6158 IEM_MC_BEGIN(4, 3);
6159 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6160 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6161 IEM_MC_ARG(uint16_t, u16Src, 2);
6162 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6164 IEM_MC_LOCAL(uint16_t, u16Ax);
6165
6166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6167 IEMOP_HLP_DONE_DECODING();
6168 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6169 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6170 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6171 IEM_MC_FETCH_EFLAGS(EFlags);
6172 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6173 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6174 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6175 else
6176 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6177
6178 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6179 IEM_MC_COMMIT_EFLAGS(EFlags);
6180 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6181 IEM_MC_ADVANCE_RIP();
6182 IEM_MC_END();
6183 return VINF_SUCCESS;
6184
6185 case IEMMODE_32BIT:
6186 IEM_MC_BEGIN(4, 3);
6187 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6188 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6189 IEM_MC_ARG(uint32_t, u32Src, 2);
6190 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6192 IEM_MC_LOCAL(uint32_t, u32Eax);
6193
6194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6195 IEMOP_HLP_DONE_DECODING();
6196 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6197 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6198 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6199 IEM_MC_FETCH_EFLAGS(EFlags);
6200 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6201 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6202 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6203 else
6204 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6205
6206 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6207 IEM_MC_COMMIT_EFLAGS(EFlags);
6208 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6209 IEM_MC_ADVANCE_RIP();
6210 IEM_MC_END();
6211 return VINF_SUCCESS;
6212
6213 case IEMMODE_64BIT:
6214 IEM_MC_BEGIN(4, 3);
6215 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6216 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6217#ifdef RT_ARCH_X86
6218 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6219#else
6220 IEM_MC_ARG(uint64_t, u64Src, 2);
6221#endif
6222 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6223 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6224 IEM_MC_LOCAL(uint64_t, u64Rax);
6225
6226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6227 IEMOP_HLP_DONE_DECODING();
6228 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6229 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6230 IEM_MC_FETCH_EFLAGS(EFlags);
6231 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6232#ifdef RT_ARCH_X86
6233 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6234 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6235 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6236 else
6237 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6238#else
6239 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6240 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6241 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6242 else
6243 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6244#endif
6245
6246 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6247 IEM_MC_COMMIT_EFLAGS(EFlags);
6248 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6249 IEM_MC_ADVANCE_RIP();
6250 IEM_MC_END();
6251 return VINF_SUCCESS;
6252
6253 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6254 }
6255 }
6256}
6257
6258
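/**
 * Common worker for lss, lfs and lgs (0x0f 0xb2, 0xb4 and 0xb5).
 *
 * The memory operand is a far pointer: an offset of operand-size width at
 * GCPtrEff, immediately followed by the 16-bit selector (hence the U16
 * fetches at displacement 2, 4 and 8 below).
 */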
6259FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6260{
6261 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6262 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6263
6264 switch (pVCpu->iem.s.enmEffOpSize)
6265 {
6266 case IEMMODE_16BIT:
6267 IEM_MC_BEGIN(5, 1);
6268 IEM_MC_ARG(uint16_t, uSel, 0);
6269 IEM_MC_ARG(uint16_t, offSeg, 1);
6270 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6271 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6272 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6273 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6274 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6276 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6277 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6278 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6279 IEM_MC_END();
6280 return VINF_SUCCESS;
6281
6282 case IEMMODE_32BIT:
6283 IEM_MC_BEGIN(5, 1);
6284 IEM_MC_ARG(uint16_t, uSel, 0);
6285 IEM_MC_ARG(uint32_t, offSeg, 1);
6286 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6287 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6288 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6289 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6292 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6293 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6294 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6295 IEM_MC_END();
6296 return VINF_SUCCESS;
6297
6298 case IEMMODE_64BIT:
6299 IEM_MC_BEGIN(5, 1);
6300 IEM_MC_ARG(uint16_t, uSel, 0);
6301 IEM_MC_ARG(uint64_t, offSeg, 1);
6302 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6303 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6304 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6305 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6308 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
6309 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6310 else
6311 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6312 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6313 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6314 IEM_MC_END();
6315 return VINF_SUCCESS;
6316
6317 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6318 }
6319}
6320
6321
6322/** Opcode 0x0f 0xb2. */
6323FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6324{
6325 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6326 IEMOP_HLP_MIN_386();
6327 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6328 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6329 return IEMOP_RAISE_INVALID_OPCODE();
6330 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6331}
6332
6333
6334/** Opcode 0x0f 0xb3. */
6335FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6336{
6337 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6338 IEMOP_HLP_MIN_386();
6339 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6340}
6341
6342
6343/** Opcode 0x0f 0xb4. */
6344FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6345{
6346 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6347 IEMOP_HLP_MIN_386();
6348 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6349 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6350 return IEMOP_RAISE_INVALID_OPCODE();
6351 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6352}
6353
6354
6355/** Opcode 0x0f 0xb5. */
6356FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6357{
6358 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6359 IEMOP_HLP_MIN_386();
6360 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6361 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6362 return IEMOP_RAISE_INVALID_OPCODE();
6363 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6364}
6365
6366
6367/** Opcode 0x0f 0xb6. */
6368FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6369{
6370 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6371 IEMOP_HLP_MIN_386();
6372
6373 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6374
6375 /*
6376 * If rm is denoting a register, no more instruction bytes.
6377 */
6378 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6379 {
6380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6381 switch (pVCpu->iem.s.enmEffOpSize)
6382 {
6383 case IEMMODE_16BIT:
6384 IEM_MC_BEGIN(0, 1);
6385 IEM_MC_LOCAL(uint16_t, u16Value);
6386 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6387 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6388 IEM_MC_ADVANCE_RIP();
6389 IEM_MC_END();
6390 return VINF_SUCCESS;
6391
6392 case IEMMODE_32BIT:
6393 IEM_MC_BEGIN(0, 1);
6394 IEM_MC_LOCAL(uint32_t, u32Value);
6395 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6396 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6397 IEM_MC_ADVANCE_RIP();
6398 IEM_MC_END();
6399 return VINF_SUCCESS;
6400
6401 case IEMMODE_64BIT:
6402 IEM_MC_BEGIN(0, 1);
6403 IEM_MC_LOCAL(uint64_t, u64Value);
6404 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6405 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6406 IEM_MC_ADVANCE_RIP();
6407 IEM_MC_END();
6408 return VINF_SUCCESS;
6409
6410 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6411 }
6412 }
6413 else
6414 {
6415 /*
6416 * We're loading a register from memory.
6417 */
6418 switch (pVCpu->iem.s.enmEffOpSize)
6419 {
6420 case IEMMODE_16BIT:
6421 IEM_MC_BEGIN(0, 2);
6422 IEM_MC_LOCAL(uint16_t, u16Value);
6423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6424 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6426 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6427 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6428 IEM_MC_ADVANCE_RIP();
6429 IEM_MC_END();
6430 return VINF_SUCCESS;
6431
6432 case IEMMODE_32BIT:
6433 IEM_MC_BEGIN(0, 2);
6434 IEM_MC_LOCAL(uint32_t, u32Value);
6435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6438 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6439 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6440 IEM_MC_ADVANCE_RIP();
6441 IEM_MC_END();
6442 return VINF_SUCCESS;
6443
6444 case IEMMODE_64BIT:
6445 IEM_MC_BEGIN(0, 2);
6446 IEM_MC_LOCAL(uint64_t, u64Value);
6447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6450 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6451 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6452 IEM_MC_ADVANCE_RIP();
6453 IEM_MC_END();
6454 return VINF_SUCCESS;
6455
6456 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6457 }
6458 }
6459}
6460
6461
6462/** Opcode 0x0f 0xb7. */
6463FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6464{
6465 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6466 IEMOP_HLP_MIN_386();
6467
6468 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6469
6470 /** @todo Not entirely sure how the operand size prefix is handled here,
6471 * assuming that it will be ignored. Would be nice to have a few
6472 * tests for this. */
6473 /*
6474 * If rm is denoting a register, no more instruction bytes.
6475 */
6476 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6477 {
6478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6479 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6480 {
6481 IEM_MC_BEGIN(0, 1);
6482 IEM_MC_LOCAL(uint32_t, u32Value);
6483 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6484 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6485 IEM_MC_ADVANCE_RIP();
6486 IEM_MC_END();
6487 }
6488 else
6489 {
6490 IEM_MC_BEGIN(0, 1);
6491 IEM_MC_LOCAL(uint64_t, u64Value);
6492 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6493 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6494 IEM_MC_ADVANCE_RIP();
6495 IEM_MC_END();
6496 }
6497 }
6498 else
6499 {
6500 /*
6501 * We're loading a register from memory.
6502 */
6503 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6504 {
6505 IEM_MC_BEGIN(0, 2);
6506 IEM_MC_LOCAL(uint32_t, u32Value);
6507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6510 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6511 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6512 IEM_MC_ADVANCE_RIP();
6513 IEM_MC_END();
6514 }
6515 else
6516 {
6517 IEM_MC_BEGIN(0, 2);
6518 IEM_MC_LOCAL(uint64_t, u64Value);
6519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6522 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6523 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6524 IEM_MC_ADVANCE_RIP();
6525 IEM_MC_END();
6526 }
6527 }
6528 return VINF_SUCCESS;
6529}
6530
6531
6532/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6533FNIEMOP_UD_STUB(iemOp_jmpe);
6534/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6535FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6536
6537
6538/** Opcode 0x0f 0xb9. */
6539FNIEMOP_DEF(iemOp_Grp10)
6540{
6541 Log(("iemOp_Grp10 -> #UD\n"));
6542 return IEMOP_RAISE_INVALID_OPCODE();
6543}
6544
6545
6546/** Opcode 0x0f 0xba. */
6547FNIEMOP_DEF(iemOp_Grp8)
6548{
6549 IEMOP_HLP_MIN_386();
6550 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6551 PCIEMOPBINSIZES pImpl;
6552 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6553 {
6554 case 0: case 1: case 2: case 3:
6555 return IEMOP_RAISE_INVALID_OPCODE();
6556 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6557 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6558 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6559 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6560 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6561 }
6562 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6563
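    /* Unlike the Gv forms (iemOpCommonBit_Ev_Gv), the immediate bit offset
       is simply taken modulo the operand width (the u8Bit masking below);
       no effective address adjustment takes place. */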
6564 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6565 {
6566 /* register destination. */
6567 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6569
6570 switch (pVCpu->iem.s.enmEffOpSize)
6571 {
6572 case IEMMODE_16BIT:
6573 IEM_MC_BEGIN(3, 0);
6574 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6575 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6576 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6577
6578 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6579 IEM_MC_REF_EFLAGS(pEFlags);
6580 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6581
6582 IEM_MC_ADVANCE_RIP();
6583 IEM_MC_END();
6584 return VINF_SUCCESS;
6585
6586 case IEMMODE_32BIT:
6587 IEM_MC_BEGIN(3, 0);
6588 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6589 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6590 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6591
6592 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6593 IEM_MC_REF_EFLAGS(pEFlags);
6594 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6595
6596 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6597 IEM_MC_ADVANCE_RIP();
6598 IEM_MC_END();
6599 return VINF_SUCCESS;
6600
6601 case IEMMODE_64BIT:
6602 IEM_MC_BEGIN(3, 0);
6603 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6604 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6605 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6606
6607 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6608 IEM_MC_REF_EFLAGS(pEFlags);
6609 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6610
6611 IEM_MC_ADVANCE_RIP();
6612 IEM_MC_END();
6613 return VINF_SUCCESS;
6614
6615 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6616 }
6617 }
6618 else
6619 {
6620 /* memory destination. */
6621
6622 uint32_t fAccess;
6623 if (pImpl->pfnLockedU16)
6624 fAccess = IEM_ACCESS_DATA_RW;
6625 else /* BT */
6626 fAccess = IEM_ACCESS_DATA_R;
6627
6628 /** @todo test negative bit offsets! */
6629 switch (pVCpu->iem.s.enmEffOpSize)
6630 {
6631 case IEMMODE_16BIT:
6632 IEM_MC_BEGIN(3, 1);
6633 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6634 IEM_MC_ARG(uint16_t, u16Src, 1);
6635 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6636 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6637
6638 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6639 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6640 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6641 if (pImpl->pfnLockedU16)
6642 IEMOP_HLP_DONE_DECODING();
6643 else
6644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6645 IEM_MC_FETCH_EFLAGS(EFlags);
6646 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6647 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6648 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6649 else
6650 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6651 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6652
6653 IEM_MC_COMMIT_EFLAGS(EFlags);
6654 IEM_MC_ADVANCE_RIP();
6655 IEM_MC_END();
6656 return VINF_SUCCESS;
6657
6658 case IEMMODE_32BIT:
6659 IEM_MC_BEGIN(3, 1);
6660 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6661 IEM_MC_ARG(uint32_t, u32Src, 1);
6662 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6664
6665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6666 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6667 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6668 if (pImpl->pfnLockedU16)
6669 IEMOP_HLP_DONE_DECODING();
6670 else
6671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6672 IEM_MC_FETCH_EFLAGS(EFlags);
6673 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6674 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6675 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6676 else
6677 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6678 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6679
6680 IEM_MC_COMMIT_EFLAGS(EFlags);
6681 IEM_MC_ADVANCE_RIP();
6682 IEM_MC_END();
6683 return VINF_SUCCESS;
6684
6685 case IEMMODE_64BIT:
6686 IEM_MC_BEGIN(3, 1);
6687 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6688 IEM_MC_ARG(uint64_t, u64Src, 1);
6689 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6691
6692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6693 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6694 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6695 if (pImpl->pfnLockedU16)
6696 IEMOP_HLP_DONE_DECODING();
6697 else
6698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6699 IEM_MC_FETCH_EFLAGS(EFlags);
6700 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6701 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6702 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6703 else
6704 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6705 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6706
6707 IEM_MC_COMMIT_EFLAGS(EFlags);
6708 IEM_MC_ADVANCE_RIP();
6709 IEM_MC_END();
6710 return VINF_SUCCESS;
6711
6712 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6713 }
6714 }
6715
6716}
6717
6718
6719/** Opcode 0x0f 0xbb. */
6720FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6721{
6722 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6723 IEMOP_HLP_MIN_386();
6724 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6725}
6726
6727
6728/** Opcode 0x0f 0xbc. */
6729FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6730{
6731 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6732 IEMOP_HLP_MIN_386();
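    /* When the source is zero, ZF is set and the destination is undefined
       (real CPUs typically leave it unchanged); the remaining arithmetic
       flags are always undefined, hence the verification mask below.  The
       same applies to bsr further down. */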
6733 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6734 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6735}
6736
6737
6738/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6739FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6740
6741
6742/** Opcode 0x0f 0xbd. */
6743FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6744{
6745 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6746 IEMOP_HLP_MIN_386();
6747 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6748 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6749}
6750
6751
6752/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6753FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6754
6755
6756/** Opcode 0x0f 0xbe. */
6757FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6758{
6759 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6760 IEMOP_HLP_MIN_386();
6761
6762 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6763
6764 /*
6765 * If rm is denoting a register, no more instruction bytes.
6766 */
6767 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6768 {
6769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6770 switch (pVCpu->iem.s.enmEffOpSize)
6771 {
6772 case IEMMODE_16BIT:
6773 IEM_MC_BEGIN(0, 1);
6774 IEM_MC_LOCAL(uint16_t, u16Value);
6775 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6776 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6777 IEM_MC_ADVANCE_RIP();
6778 IEM_MC_END();
6779 return VINF_SUCCESS;
6780
6781 case IEMMODE_32BIT:
6782 IEM_MC_BEGIN(0, 1);
6783 IEM_MC_LOCAL(uint32_t, u32Value);
6784 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6785 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6786 IEM_MC_ADVANCE_RIP();
6787 IEM_MC_END();
6788 return VINF_SUCCESS;
6789
6790 case IEMMODE_64BIT:
6791 IEM_MC_BEGIN(0, 1);
6792 IEM_MC_LOCAL(uint64_t, u64Value);
6793 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6794 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6795 IEM_MC_ADVANCE_RIP();
6796 IEM_MC_END();
6797 return VINF_SUCCESS;
6798
6799 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6800 }
6801 }
6802 else
6803 {
6804 /*
6805 * We're loading a register from memory.
6806 */
6807 switch (pVCpu->iem.s.enmEffOpSize)
6808 {
6809 case IEMMODE_16BIT:
6810 IEM_MC_BEGIN(0, 2);
6811 IEM_MC_LOCAL(uint16_t, u16Value);
6812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6813 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6815 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6816 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6817 IEM_MC_ADVANCE_RIP();
6818 IEM_MC_END();
6819 return VINF_SUCCESS;
6820
6821 case IEMMODE_32BIT:
6822 IEM_MC_BEGIN(0, 2);
6823 IEM_MC_LOCAL(uint32_t, u32Value);
6824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6827 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6828 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6829 IEM_MC_ADVANCE_RIP();
6830 IEM_MC_END();
6831 return VINF_SUCCESS;
6832
6833 case IEMMODE_64BIT:
6834 IEM_MC_BEGIN(0, 2);
6835 IEM_MC_LOCAL(uint64_t, u64Value);
6836 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6839 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6840 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6841 IEM_MC_ADVANCE_RIP();
6842 IEM_MC_END();
6843 return VINF_SUCCESS;
6844
6845 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6846 }
6847 }
6848}
6849
6850
6851/** Opcode 0x0f 0xbf. */
6852FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6853{
6854 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6855 IEMOP_HLP_MIN_386();
6856
6857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6858
6859 /** @todo Not entirely sure how the operand size prefix is handled here,
6860 * assuming that it will be ignored. Would be nice to have a few
6861 * tests for this. */
6862 /*
6863 * If rm is denoting a register, no more instruction bytes.
6864 */
6865 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6866 {
6867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6868 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6869 {
6870 IEM_MC_BEGIN(0, 1);
6871 IEM_MC_LOCAL(uint32_t, u32Value);
6872 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6873 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6874 IEM_MC_ADVANCE_RIP();
6875 IEM_MC_END();
6876 }
6877 else
6878 {
6879 IEM_MC_BEGIN(0, 1);
6880 IEM_MC_LOCAL(uint64_t, u64Value);
6881 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6882 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6883 IEM_MC_ADVANCE_RIP();
6884 IEM_MC_END();
6885 }
6886 }
6887 else
6888 {
6889 /*
6890 * We're loading a register from memory.
6891 */
6892 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6893 {
6894 IEM_MC_BEGIN(0, 2);
6895 IEM_MC_LOCAL(uint32_t, u32Value);
6896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6899 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6900 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6901 IEM_MC_ADVANCE_RIP();
6902 IEM_MC_END();
6903 }
6904 else
6905 {
6906 IEM_MC_BEGIN(0, 2);
6907 IEM_MC_LOCAL(uint64_t, u64Value);
6908 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6911 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6912 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6913 IEM_MC_ADVANCE_RIP();
6914 IEM_MC_END();
6915 }
6916 }
6917 return VINF_SUCCESS;
6918}
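
/* Illustrative only (not part of the decoder): the sign extension that the
   IEM_MC_FETCH_*_SX_* statements above perform can be expressed in plain C
   as a pair of casts through the signed type of the source width. */
#if 0
static uint32_t sketchMovsxU16ToU32(uint16_t u16Src)
{
    /* Reinterpret as signed 16-bit, widen (sign-extending), view as unsigned. */
    return (uint32_t)(int32_t)(int16_t)u16Src;
}
#endif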
6919
6920
6921/** Opcode 0x0f 0xc0. */
6922FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6923{
6924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6925 IEMOP_HLP_MIN_486();
6926 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6927
6928 /*
6929 * If rm is denoting a register, no more instruction bytes.
6930 */
6931 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6932 {
6933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6934
6935 IEM_MC_BEGIN(3, 0);
6936 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6937 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6938 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6939
6940 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6941 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6942 IEM_MC_REF_EFLAGS(pEFlags);
6943 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6944
6945 IEM_MC_ADVANCE_RIP();
6946 IEM_MC_END();
6947 }
6948 else
6949 {
6950 /*
6951 * We're accessing memory.
6952 */
6953 IEM_MC_BEGIN(3, 3);
6954 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6955 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6956 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6957 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6958 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6959
6960 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6961 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6962 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6963 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6964 IEM_MC_FETCH_EFLAGS(EFlags);
6965 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6966 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6967 else
6968 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6969
6970 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6971 IEM_MC_COMMIT_EFLAGS(EFlags);
6972 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6973 IEM_MC_ADVANCE_RIP();
6974 IEM_MC_END();
6975 return VINF_SUCCESS;
6976 }
6977 return VINF_SUCCESS;
6978}
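
/* Illustrative only: the exchange-and-add that the iemAImpl_xadd_u8 worker
   called above performs boils down to the following. The EFLAGS updates,
   which the real worker derives from the addition, are omitted here. */
#if 0
static void sketchXaddU8(uint8_t *pu8Dst, uint8_t *pu8Reg)
{
    uint8_t const u8OldDst = *pu8Dst;
    *pu8Dst = u8OldDst + *pu8Reg;   /* the destination receives the sum */
    *pu8Reg = u8OldDst;             /* the source register receives the old destination */
}
#endif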
6979
6980
6981/** Opcode 0x0f 0xc1. */
6982FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6983{
6984 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6985 IEMOP_HLP_MIN_486();
6986 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6987
6988 /*
6989 * If rm is denoting a register, no more instruction bytes.
6990 */
6991 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6992 {
6993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6994
6995 switch (pVCpu->iem.s.enmEffOpSize)
6996 {
6997 case IEMMODE_16BIT:
6998 IEM_MC_BEGIN(3, 0);
6999 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7000 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7001 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7002
7003 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7004 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7005 IEM_MC_REF_EFLAGS(pEFlags);
7006 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7007
7008 IEM_MC_ADVANCE_RIP();
7009 IEM_MC_END();
7010 return VINF_SUCCESS;
7011
7012 case IEMMODE_32BIT:
7013 IEM_MC_BEGIN(3, 0);
7014 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7015 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7016 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7017
7018 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7019 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7020 IEM_MC_REF_EFLAGS(pEFlags);
7021 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7022
7023 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7024 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7025 IEM_MC_ADVANCE_RIP();
7026 IEM_MC_END();
7027 return VINF_SUCCESS;
7028
7029 case IEMMODE_64BIT:
7030 IEM_MC_BEGIN(3, 0);
7031 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7032 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7033 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7034
7035 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7036 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7037 IEM_MC_REF_EFLAGS(pEFlags);
7038 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7039
7040 IEM_MC_ADVANCE_RIP();
7041 IEM_MC_END();
7042 return VINF_SUCCESS;
7043
7044 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7045 }
7046 }
7047 else
7048 {
7049 /*
7050 * We're accessing memory.
7051 */
7052 switch (pVCpu->iem.s.enmEffOpSize)
7053 {
7054 case IEMMODE_16BIT:
7055 IEM_MC_BEGIN(3, 3);
7056 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7057 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7058 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7059 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7061
7062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7063 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7064 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7065 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7066 IEM_MC_FETCH_EFLAGS(EFlags);
7067 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7068 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7069 else
7070 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7071
7072 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7073 IEM_MC_COMMIT_EFLAGS(EFlags);
7074 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7075 IEM_MC_ADVANCE_RIP();
7076 IEM_MC_END();
7077 return VINF_SUCCESS;
7078
7079 case IEMMODE_32BIT:
7080 IEM_MC_BEGIN(3, 3);
7081 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7082 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7083 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7084 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7086
7087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7088 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7089 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7090 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7091 IEM_MC_FETCH_EFLAGS(EFlags);
7092 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7093 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7094 else
7095 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7096
7097 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7098 IEM_MC_COMMIT_EFLAGS(EFlags);
7099 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7100 IEM_MC_ADVANCE_RIP();
7101 IEM_MC_END();
7102 return VINF_SUCCESS;
7103
7104 case IEMMODE_64BIT:
7105 IEM_MC_BEGIN(3, 3);
7106 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7107 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7108 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7109 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7111
7112 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7113 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7114 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7115 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7116 IEM_MC_FETCH_EFLAGS(EFlags);
7117 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7118 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7119 else
7120 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7121
7122 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7123 IEM_MC_COMMIT_EFLAGS(EFlags);
7124 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7125 IEM_MC_ADVANCE_RIP();
7126 IEM_MC_END();
7127 return VINF_SUCCESS;
7128
7129 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7130 }
7131 }
7132}
7133
7134
7135/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
7136FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
7137/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
7138FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
7139/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
7140FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
7141/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
7142FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7143
7144
7145/** Opcode 0x0f 0xc3. */
7146FNIEMOP_DEF(iemOp_movnti_My_Gy)
7147{
7148 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7149
7150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7151
7152 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7153 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7154 {
7155 switch (pVCpu->iem.s.enmEffOpSize)
7156 {
7157 case IEMMODE_32BIT:
7158 IEM_MC_BEGIN(0, 2);
7159 IEM_MC_LOCAL(uint32_t, u32Value);
7160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7161
7162 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7164 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7165 return IEMOP_RAISE_INVALID_OPCODE();
7166
7167 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7168 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7169 IEM_MC_ADVANCE_RIP();
7170 IEM_MC_END();
7171 break;
7172
7173 case IEMMODE_64BIT:
7174 IEM_MC_BEGIN(0, 2);
7175 IEM_MC_LOCAL(uint64_t, u64Value);
7176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7177
7178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7180 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7181 return IEMOP_RAISE_INVALID_OPCODE();
7182
7183 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7184 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7185 IEM_MC_ADVANCE_RIP();
7186 IEM_MC_END();
7187 break;
7188
7189 case IEMMODE_16BIT:
7190 /** @todo check this form. */
7191 return IEMOP_RAISE_INVALID_OPCODE();
7192 }
7193 }
7194 else
7195 return IEMOP_RAISE_INVALID_OPCODE();
7196 return VINF_SUCCESS;
7197}
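
/* Illustrative only: at the instruction-set level MOVNTI is a plain store
   with a non-temporal cache hint. Compilers expose it via the SSE2 intrinsic
   below, which is one way guest code ends up using this opcode. */
#if 0
# include <emmintrin.h>
static void sketchMovnti(int *pi32Dst, int i32Value)
{
    _mm_stream_si32(pi32Dst, i32Value); /* typically assembles to MOVNTI */
}
#endif
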
7198/* Opcode 0x66 0x0f 0xc3 - invalid */
7199/* Opcode 0xf3 0x0f 0xc3 - invalid */
7200/* Opcode 0xf2 0x0f 0xc3 - invalid */
7201
7202/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
7203FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7204/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
7205FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
7206/* Opcode 0xf3 0x0f 0xc4 - invalid */
7207/* Opcode 0xf2 0x0f 0xc4 - invalid */
7208
7209/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7210FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7211/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
7212FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
7213/* Opcode 0xf3 0x0f 0xc5 - invalid */
7214/* Opcode 0xf2 0x0f 0xc5 - invalid */
7215
7216/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
7217FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
7218/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
7219FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
7220/* Opcode 0xf3 0x0f 0xc6 - invalid */
7221/* Opcode 0xf2 0x0f 0xc6 - invalid */
7222
7223
7224/** Opcode 0x0f 0xc7 !11/1. */
7225FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7226{
7227 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7228
7229 IEM_MC_BEGIN(4, 3);
7230 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7231 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7232 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7233 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7234 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7235 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7237
7238 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7239 IEMOP_HLP_DONE_DECODING();
7240 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7241
7242 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7243 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7244 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7245
7246 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7247 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7248 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7249
7250 IEM_MC_FETCH_EFLAGS(EFlags);
7251 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7252 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7253 else
7254 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7255
7256 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7257 IEM_MC_COMMIT_EFLAGS(EFlags);
7258 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7259 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7260 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7261 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7262 IEM_MC_ENDIF();
7263 IEM_MC_ADVANCE_RIP();
7264
7265 IEM_MC_END();
7266 return VINF_SUCCESS;
7267}
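
/* Illustrative only: the architectural operation that iemAImpl_cmpxchg8b
   implements (CF/PF/AF/SF/OF handling omitted). EDX:EAX is compared with the
   memory operand; on a match the memory operand is replaced by ECX:EBX and
   ZF is set, otherwise EDX:EAX is loaded from memory and ZF is cleared. */
#if 0
static void sketchCmpXchg8b(uint64_t *pu64Mem, RTUINT64U *pu64EaxEdx,
                            RTUINT64U const *pu64EbxEcx, uint32_t *pfEFlags)
{
    if (*pu64Mem == pu64EaxEdx->u)
    {
        *pu64Mem   = pu64EbxEcx->u;
        *pfEFlags |= X86_EFL_ZF;
    }
    else
    {
        pu64EaxEdx->u = *pu64Mem;
        *pfEFlags    &= ~X86_EFL_ZF;
    }
}
#endif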
7268
7269
7270/** Opcode REX.W 0x0f 0xc7 !11/1. */
7271FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7272{
7273 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7274 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7275 {
7276#if 0
7277 RT_NOREF(bRm);
7278 IEMOP_BITCH_ABOUT_STUB();
7279 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7280#else
7281 IEM_MC_BEGIN(4, 3);
7282 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7283 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7284 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7285 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7286 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7287 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7289
7290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7291 IEMOP_HLP_DONE_DECODING();
7292 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7293 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7294
7295 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7296 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7297 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7298
7299 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7300 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7301 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7302
7303 IEM_MC_FETCH_EFLAGS(EFlags);
7304# ifdef RT_ARCH_AMD64
7305 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7306 {
7307 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7308 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7309 else
7310 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7311 }
7312 else
7313# endif
7314 {
7315 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
7316 accesses and not at all atomic, which works fine in a uni-CPU guest
7317 configuration (ignoring DMA). If guest SMP is active we have no choice
7318 but to use a rendezvous callback here. Sigh. */
7319 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7320 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7321 else
7322 {
7323 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7324 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7325 }
7326 }
7327
7328 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7329 IEM_MC_COMMIT_EFLAGS(EFlags);
7330 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7331 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7332 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7333 IEM_MC_ENDIF();
7334 IEM_MC_ADVANCE_RIP();
7335
7336 IEM_MC_END();
7337 return VINF_SUCCESS;
7338#endif
7339 }
7340 Log(("cmpxchg16b -> #UD\n"));
7341 return IEMOP_RAISE_INVALID_OPCODE();
7342}
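
/* Illustrative only: on hosts where the compiler supports 128-bit atomics
   (GCC/clang with -mcx16 on AMD64), the native instruction that the assembly
   worker above relies on can also be reached like this. */
#if 0
# include <stdbool.h>
static bool sketchCmpXchg16b(volatile unsigned __int128 *pu128Mem,
                             unsigned __int128 *pu128RaxRdx, unsigned __int128 u128RbxRcx)
{
    /* Can compile to LOCK CMPXCHG16B; on failure the old value lands in *pu128RaxRdx. */
    return __atomic_compare_exchange_n(pu128Mem, pu128RaxRdx, u128RbxRcx,
                                       false /*fWeak*/, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}
#endif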
7343
7344
7345/** Opcode 0x0f 0xc7 11/6. */
7346FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7347
7348/** Opcode 0x0f 0xc7 !11/6. */
7349FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7350
7351/** Opcode 0x66 0x0f 0xc7 !11/6. */
7352FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7353
7354/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7355FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7356
7357/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7358FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7359
7360
7361/** Opcode 0x0f 0xc7. */
7362FNIEMOP_DEF(iemOp_Grp9)
7363{
7364 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
7365 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7366 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7367 {
7368 case 0: case 2: case 3: case 4: case 5:
7369 return IEMOP_RAISE_INVALID_OPCODE();
7370 case 1:
7371 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
7372 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
7373 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
7374 return IEMOP_RAISE_INVALID_OPCODE();
7375 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7376 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7377 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7378 case 6:
7379 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7380 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
7381 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7382 {
7383 case 0:
7384 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
7385 case IEM_OP_PRF_SIZE_OP:
7386 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
7387 case IEM_OP_PRF_REPZ:
7388 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
7389 default:
7390 return IEMOP_RAISE_INVALID_OPCODE();
7391 }
7392 case 7:
7393 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7394 {
7395 case 0:
7396 case IEM_OP_PRF_REPZ:
7397 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
7398 default:
7399 return IEMOP_RAISE_INVALID_OPCODE();
7400 }
7401 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7402 }
7403}
7404
7405
7406/**
7407 * Common 'bswap register' helper.
7408 */
7409FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7410{
7411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7412 switch (pVCpu->iem.s.enmEffOpSize)
7413 {
7414 case IEMMODE_16BIT:
7415 IEM_MC_BEGIN(1, 0);
7416 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7417 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7418 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7419 IEM_MC_ADVANCE_RIP();
7420 IEM_MC_END();
7421 return VINF_SUCCESS;
7422
7423 case IEMMODE_32BIT:
7424 IEM_MC_BEGIN(1, 0);
7425 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7426 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7427 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7428 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7429 IEM_MC_ADVANCE_RIP();
7430 IEM_MC_END();
7431 return VINF_SUCCESS;
7432
7433 case IEMMODE_64BIT:
7434 IEM_MC_BEGIN(1, 0);
7435 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7436 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7437 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7438 IEM_MC_ADVANCE_RIP();
7439 IEM_MC_END();
7440 return VINF_SUCCESS;
7441
7442 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7443 }
7444}
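
/* Illustrative only: a plain C equivalent of the 32-bit byte swap that the
   iemAImpl_bswap_u32 assembly worker performs on the referenced register. */
#if 0
static uint32_t sketchBswapU32(uint32_t u32)
{
    return  (u32 >> 24)
         | ((u32 >>  8) & UINT32_C(0x0000ff00))
         | ((u32 <<  8) & UINT32_C(0x00ff0000))
         |  (u32 << 24);
}
#endif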
7445
7446
7447/** Opcode 0x0f 0xc8. */
7448FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7449{
7450 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7451 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7452 prefix, but REX.B appears to be the correct prefix. For a parallel
7453 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
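    /* E.g. 0F C8 = bswap eax; 41 0F C8 (REX.B) = bswap r8d; 48 0F C8 (REX.W) = bswap rax. */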
7454 IEMOP_HLP_MIN_486();
7455 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7456}
7457
7458
7459/** Opcode 0x0f 0xc9. */
7460FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7461{
7462 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7463 IEMOP_HLP_MIN_486();
7464 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7465}
7466
7467
7468/** Opcode 0x0f 0xca. */
7469FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7470{
7471 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7472 IEMOP_HLP_MIN_486();
7473 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7474}
7475
7476
7477/** Opcode 0x0f 0xcb. */
7478FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7479{
7480 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7481 IEMOP_HLP_MIN_486();
7482 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7483}
7484
7485
7486/** Opcode 0x0f 0xcc. */
7487FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7488{
7489 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7490 IEMOP_HLP_MIN_486();
7491 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7492}
7493
7494
7495/** Opcode 0x0f 0xcd. */
7496FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7497{
7498 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7499 IEMOP_HLP_MIN_486();
7500 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7501}
7502
7503
7504/** Opcode 0x0f 0xce. */
7505FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7506{
7507 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7508 IEMOP_HLP_MIN_486();
7509 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7510}
7511
7512
7513/** Opcode 0x0f 0xcf. */
7514FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7515{
7516 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7517 IEMOP_HLP_MIN_486();
7518 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7519}
7520
7521
7522/* Opcode 0x0f 0xd0 - invalid */
7523/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7524FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7525/* Opcode 0xf3 0x0f 0xd0 - invalid */
7526/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7527FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7528
7529/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7530FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7531/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7532FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7533/* Opcode 0xf3 0x0f 0xd1 - invalid */
7534/* Opcode 0xf2 0x0f 0xd1 - invalid */
7535
7536/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7537FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7538/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7539FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7540/* Opcode 0xf3 0x0f 0xd2 - invalid */
7541/* Opcode 0xf2 0x0f 0xd2 - invalid */
7542
7543/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7544FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7545/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7546FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7547/* Opcode 0xf3 0x0f 0xd3 - invalid */
7548/* Opcode 0xf2 0x0f 0xd3 - invalid */
7549
7550/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7551FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7552/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7553FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7554/* Opcode 0xf3 0x0f 0xd4 - invalid */
7555/* Opcode 0xf2 0x0f 0xd4 - invalid */
7556
7557/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7558FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7559/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7560FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7561/* Opcode 0xf3 0x0f 0xd5 - invalid */
7562/* Opcode 0xf2 0x0f 0xd5 - invalid */
7563
7564/* Opcode 0x0f 0xd6 - invalid */
7565/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7566FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7567/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7568FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7569/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7570FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7571#if 0
7572FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7573{
7574 /* Docs say register only. */
7575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7576
7577 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7578 {
7579 case IEM_OP_PRF_SIZE_OP: /* SSE */
7580 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7581 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7582 IEM_MC_BEGIN(2, 0);
7583 IEM_MC_ARG(uint64_t *, pDst, 0);
7584 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7585 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7586 IEM_MC_PREPARE_SSE_USAGE();
7587 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7588 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7589 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7590 IEM_MC_ADVANCE_RIP();
7591 IEM_MC_END();
7592 return VINF_SUCCESS;
7593
7594 case 0: /* MMX */
7595 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7596 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7597 IEM_MC_BEGIN(2, 0);
7598 IEM_MC_ARG(uint64_t *, pDst, 0);
7599 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7600 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7601 IEM_MC_PREPARE_FPU_USAGE();
7602 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7603 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7604 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7605 IEM_MC_ADVANCE_RIP();
7606 IEM_MC_END();
7607 return VINF_SUCCESS;
7608
7609 default:
7610 return IEMOP_RAISE_INVALID_OPCODE();
7611 }
7612}
7613#endif
7614
7615
7616/** Opcode 0x0f 0xd7. */
7617FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7618{
7619 /* Docs say register only. */
7620 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7621 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7622 return IEMOP_RAISE_INVALID_OPCODE();
7623
7624 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7625 /** @todo testcase: Check that the instruction implicitly clears the high
7626 * bits in 64-bit mode. REX.W first becomes necessary when VLMAX > 256
7627 * and opcode modifications are made to work with the whole width (not
7628 * just 128). */
7629 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7630 {
7631 case IEM_OP_PRF_SIZE_OP: /* SSE */
7632 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7633 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7634 IEM_MC_BEGIN(2, 0);
7635 IEM_MC_ARG(uint64_t *, pDst, 0);
7636 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7637 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7638 IEM_MC_PREPARE_SSE_USAGE();
7639 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7640 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7641 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7642 IEM_MC_ADVANCE_RIP();
7643 IEM_MC_END();
7644 return VINF_SUCCESS;
7645
7646 case 0: /* MMX */
7647 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7648 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7649 IEM_MC_BEGIN(2, 0);
7650 IEM_MC_ARG(uint64_t *, pDst, 0);
7651 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7652 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7653 IEM_MC_PREPARE_FPU_USAGE();
7654 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7655 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7656 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7657 IEM_MC_ADVANCE_RIP();
7658 IEM_MC_END();
7659 return VINF_SUCCESS;
7660
7661 default:
7662 return IEMOP_RAISE_INVALID_OPCODE();
7663 }
7664}
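
/* Illustrative only: what the iemAImpl_pmovmskb_u64 worker computes for the
   MMX form - the most significant bit of each source byte is gathered into
   the corresponding low bit of the destination. */
#if 0
static uint64_t sketchPmovmskbU64(uint64_t u64Src)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((u64Src >> (iByte * 8 + 7)) & 1) << iByte;
    return fMask;
}
#endif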
7665
7666
7667/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7668FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7669/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7670FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7671/* Opcode 0xf3 0x0f 0xd8 - invalid */
7672/* Opcode 0xf2 0x0f 0xd8 - invalid */
7673
7674/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7675FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7676/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7677FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7678/* Opcode 0xf3 0x0f 0xd9 - invalid */
7679/* Opcode 0xf2 0x0f 0xd9 - invalid */
7680
7681/** Opcode 0x0f 0xda - pminub Pq, Qq */
7682FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7683/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7684FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7685/* Opcode 0xf3 0x0f 0xda - invalid */
7686/* Opcode 0xf2 0x0f 0xda - invalid */
7687
7688/** Opcode 0x0f 0xdb - pand Pq, Qq */
7689FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7690/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7691FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7692/* Opcode 0xf3 0x0f 0xdb - invalid */
7693/* Opcode 0xf2 0x0f 0xdb - invalid */
7694
7695/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7696FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7697/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7698FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7699/* Opcode 0xf3 0x0f 0xdc - invalid */
7700/* Opcode 0xf2 0x0f 0xdc - invalid */
7701
7702/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7703FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7704/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7705FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7706/* Opcode 0xf3 0x0f 0xdd - invalid */
7707/* Opcode 0xf2 0x0f 0xdd - invalid */
7708
7709/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7710FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7711/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7712FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7713/* Opcode 0xf3 0x0f 0xde - invalid */
7714/* Opcode 0xf2 0x0f 0xde - invalid */
7715
7716/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7717FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7718/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7719FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7720/* Opcode 0xf3 0x0f 0xdf - invalid */
7721/* Opcode 0xf2 0x0f 0xdf - invalid */
7722
7723/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7724FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7725/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7726FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7727/* Opcode 0xf3 0x0f 0xe0 - invalid */
7728/* Opcode 0xf2 0x0f 0xe0 - invalid */
7729
7730/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7731FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7732/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7733FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7734/* Opcode 0xf3 0x0f 0xe1 - invalid */
7735/* Opcode 0xf2 0x0f 0xe1 - invalid */
7736
7737/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7738FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7739/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7740FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7741/* Opcode 0xf3 0x0f 0xe2 - invalid */
7742/* Opcode 0xf2 0x0f 0xe2 - invalid */
7743
7744/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7745FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7746/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7747FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7748/* Opcode 0xf3 0x0f 0xe3 - invalid */
7749/* Opcode 0xf2 0x0f 0xe3 - invalid */
7750
7751/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7752FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7753/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7754FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7755/* Opcode 0xf3 0x0f 0xe4 - invalid */
7756/* Opcode 0xf2 0x0f 0xe4 - invalid */
7757
7758/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7759FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7760/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7761FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7762/* Opcode 0xf3 0x0f 0xe5 - invalid */
7763/* Opcode 0xf2 0x0f 0xe5 - invalid */
7764
7765/* Opcode 0x0f 0xe6 - invalid */
7766/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7767FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7768/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7769FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7770/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7771FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7772
7773
7774/** Opcode 0x0f 0xe7. */
7775FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7776{
7777 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7778 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7779 {
7780 /*
7781 * Register, memory.
7782 */
7783/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
7784 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7785 {
7786
7787 case IEM_OP_PRF_SIZE_OP: /* SSE */
7788 IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7789 IEM_MC_BEGIN(0, 2);
7790 IEM_MC_LOCAL(uint128_t, uSrc);
7791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7792
7793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7795 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7796 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7797
7798 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7799 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7800
7801 IEM_MC_ADVANCE_RIP();
7802 IEM_MC_END();
7803 break;
7804
7805 case 0: /* MMX */
7806 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7807 IEM_MC_BEGIN(0, 2);
7808 IEM_MC_LOCAL(uint64_t, uSrc);
7809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7810
7811 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7813 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7814 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7815
7816 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7817 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7818
7819 IEM_MC_ADVANCE_RIP();
7820 IEM_MC_END();
7821 break;
7822
7823 default:
7824 return IEMOP_RAISE_INVALID_OPCODE();
7825 }
7826 }
7827 /* The register, register encoding is invalid. */
7828 else
7829 return IEMOP_RAISE_INVALID_OPCODE();
7830 return VINF_SUCCESS;
7831}
7832
7833
7834/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7835FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7836/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7837FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7838/* Opcode 0xf3 0x0f 0xe8 - invalid */
7839/* Opcode 0xf2 0x0f 0xe8 - invalid */
7840
7841/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7842FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7843/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7844FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7845/* Opcode 0xf3 0x0f 0xe9 - invalid */
7846/* Opcode 0xf2 0x0f 0xe9 - invalid */
7847
7848/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7849FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7850/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7851FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7852/* Opcode 0xf3 0x0f 0xea - invalid */
7853/* Opcode 0xf2 0x0f 0xea - invalid */
7854
7855/** Opcode 0x0f 0xeb - por Pq, Qq */
7856FNIEMOP_STUB(iemOp_por_Pq_Qq);
7857/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7858FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7859/* Opcode 0xf3 0x0f 0xeb - invalid */
7860/* Opcode 0xf2 0x0f 0xeb - invalid */
7861
7862/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7863FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7864/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7865FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7866/* Opcode 0xf3 0x0f 0xec - invalid */
7867/* Opcode 0xf2 0x0f 0xec - invalid */
7868
7869/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7870FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7871/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7872FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7873/* Opcode 0xf3 0x0f 0xed - invalid */
7874/* Opcode 0xf2 0x0f 0xed - invalid */
7875
7876/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7877FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7878/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7879FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7880/* Opcode 0xf3 0x0f 0xee - invalid */
7881/* Opcode 0xf2 0x0f 0xee - invalid */
7882
7883
7884/** Opcode 0x0f 0xef. */
7885FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
7886{
7887 IEMOP_MNEMONIC(pxor, "pxor");
7888 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7889}
7890/* Opcode 0xf3 0x0f 0xef - invalid */
7891/* Opcode 0xf2 0x0f 0xef - invalid */
7892
7893/* Opcode 0x0f 0xf0 - invalid */
7894/* Opcode 0x66 0x0f 0xf0 - invalid */
7895/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7896FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7897
7898/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7899FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7900/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7901FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7902/* Opcode 0xf2 0x0f 0xf1 - invalid */
7903
7904/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7905FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7906/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7907FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7908/* Opcode 0xf2 0x0f 0xf2 - invalid */
7909
7910/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7911FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7912/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7913FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7914/* Opcode 0xf2 0x0f 0xf3 - invalid */
7915
7916/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7917FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7918/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7919FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7920/* Opcode 0xf2 0x0f 0xf4 - invalid */
7921
7922/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7923FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7924/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7925FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7926/* Opcode 0xf2 0x0f 0xf5 - invalid */
7927
7928/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7929FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7930/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7931FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7932/* Opcode 0xf2 0x0f 0xf6 - invalid */
7933
7934/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7935FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7936/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7937FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7938/* Opcode 0xf2 0x0f 0xf7 - invalid */
7939
7940/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7941FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7942/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7943FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7944/* Opcode 0xf2 0x0f 0xf8 - invalid */
7945
7946/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7947FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7948/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7949FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7950/* Opcode 0xf2 0x0f 0xf9 - invalid */
7951
7952/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7953FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7954/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7955FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7956/* Opcode 0xf2 0x0f 0xfa - invalid */
7957
7958/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7959FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7960/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7961FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7962/* Opcode 0xf2 0x0f 0xfb - invalid */
7963
7964/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7965FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7966/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7967FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7968/* Opcode 0xf2 0x0f 0xfc - invalid */
7969
7970/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7971FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7972/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7973FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7974/* Opcode 0xf2 0x0f 0xfd - invalid */
7975
7976/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7977FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7978/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7979FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7980/* Opcode 0xf2 0x0f 0xfe - invalid */
7981
7982
7983/** Opcode **** 0x0f 0xff - UD0 */
7984FNIEMOP_DEF(iemOp_ud0)
7985{
7986 IEMOP_MNEMONIC(ud0, "ud0");
7987 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7988 {
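    /* Note: Intel CPUs appear to decode a ModR/M byte (including any memory
       operand) for UD0 before raising #UD, which is why the extra decoding
       below is done for Intel only. */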
7989 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7990#ifndef TST_IEM_CHECK_MC
7991 RTGCPTR GCPtrEff;
7992 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7993 if (rcStrict != VINF_SUCCESS)
7994 return rcStrict;
7995#endif
7996 IEMOP_HLP_DONE_DECODING();
7997 }
7998 return IEMOP_RAISE_INVALID_OPCODE();
7999}
8000
8001
8002
8003/** Repeats a_fn four times. For decoding tables. */
8004#define IEMOP_X4(a_fn) a_fn, a_fn, a_fn, a_fn
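
/* Illustrative only (the actual lookup lives with the decoder, not in this
   file): the map below has four entries per opcode byte, one per mandatory
   prefix column (none, 0x66, 0xf3, 0xf2), so a lookup presumably takes the
   form sketched here; bOpcode and idxPrefixColumn are made-up names. */
#if 0
PFNIEMOP const pfnOp = g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefixColumn];
#endif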
8005
8006IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8007{
8008 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
8009 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8010 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8011 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8012 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8013 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8014 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8015 /* 0x06 */ IEMOP_X4(iemOp_clts),
8016 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8017 /* 0x08 */ IEMOP_X4(iemOp_invd),
8018 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8019 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8020 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8021 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8022 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8023 /* 0x0e */ IEMOP_X4(iemOp_femms),
8024 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
8025
8026 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
8027 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
8028 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
8029 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8030 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8031 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8032 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
8033 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8034 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
8035 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
8036 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
8037 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
8038 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
8039 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
8040 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
8041 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
8042
8043 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
8044 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
8045 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
8046 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8047 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8048 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8049 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8050 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8051 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8052 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM,
8053 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
8054 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8055 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
8056 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
8057 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8058 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8059
8060 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
8061 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
8062 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
8063 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
8064 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
8065 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
8066 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
8067 /* 0x37 */ IEMOP_X4(iemOp_getsec),
8068 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
8069 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8070 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
8071 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8072 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8073 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8074 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8075 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8076
8077 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
8078 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
8079 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
8080 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
8081 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
8082 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
8083 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
8084 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
8085 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
8086 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
8087 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
8088 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
8089 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
8090 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
8091 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
8092 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
8093
8094 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8095 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8096 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8097 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8098 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8099 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8100 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8101 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8102 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8103 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8104 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8105 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8106 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8107 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8108 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8109 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8110
8111 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8112 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8113 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8114 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8115 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8116 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8117 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8118 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8119 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8120 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8121 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8122 /* 0x6b */ IEMOP_X4(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq),
8123 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8124 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8125 /* 0x6e */ IEMOP_X4(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey),
8126 /* 0x6f */ IEMOP_X4(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq),
8127
8128 /* 0x70 */ IEMOP_X4(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib),
8129 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8130 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8131 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8132 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8133 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8134 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8135 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8136
8137 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8138 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8139 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8140 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8141 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8142 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8143 /* 0x7e */ IEMOP_X4(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq),
8144 /* 0x7f */ IEMOP_X4(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq),
8145
8146 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
8147 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
8148 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
8149 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
8150 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
8151 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
8152 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
8153 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
8154 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
8155 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
8156 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
8157 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
8158 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
8159 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
8160 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
8161 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
8162
8163 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
8164 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
8165 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
8166 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
8167 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
8168 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
8169 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
8170 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
8171 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
8172 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
8173 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
8174 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
8175 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
8176 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
8177 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
8178 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
8179
8180 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
8181 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
8182 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
8183 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
8184 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
8185 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
8186 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8187 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8188 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
8189 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
8190 /* 0xaa */ IEMOP_X4(iemOp_rsm),
8191 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
8192 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
8193 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
8194 /* 0xae */ IEMOP_X4(iemOp_Grp15),
8195 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
8196
8197 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
8198 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
8199 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
8200 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
8201 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
8202 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
8203 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
8204 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
8205 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
8206 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
8207 /* 0xba */ IEMOP_X4(iemOp_Grp8),
8208 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
8209 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
8210 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
8211 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
8212 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
8213
8214 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
8215 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
8216 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8217 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8218 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8219 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8220 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
8221 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
8222 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
8223 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
8224 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
8225 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
8226 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
8227 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
8228 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
8229 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
8230
8231 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8232 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8233 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8234 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8235 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8236 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8237 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
8238 /* 0xd7 */ IEMOP_X4(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq),
8239 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8240 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8241 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8242 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8243 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8244 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8245 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8246 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8247
8248 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8249 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8250 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8251 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8252 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8253 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8254 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8255 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8256 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8257 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8258 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8259 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8260 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8261 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8262 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8263 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8264
8265 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8266 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8267 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8268 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8269 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8270 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8271 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8272 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8273 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8274 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8275 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8276 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8277 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8278 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8279 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8280 /* 0xff */ IEMOP_X4(iemOp_ud0),
8281};
8282AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
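
/*
 * Layout note: every two-byte opcode has four entries in g_apfnTwoByteMap,
 * one per mandatory prefix as recorded in pVCpu->iem.s.idxPrefix:
 *      +0 no prefix, +1 0x66, +2 0xf3, +3 0xf2
 * which is where the 256 * 4 = 1024 element count comes from. The dispatch
 * itself lives in iemOp_2byteEscape below.
 */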
8283/** @} */
8284
8285
8286/** @name One byte opcodes.
8287 *
8288 * @{
8289 */
8290
8291/** Opcode 0x00. */
8292FNIEMOP_DEF(iemOp_add_Eb_Gb)
8293{
8294 IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
8295 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
8296}
8297
8298
8299/** Opcode 0x01. */
8300FNIEMOP_DEF(iemOp_add_Ev_Gv)
8301{
8302 IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
8303 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
8304}
8305
8306
8307/** Opcode 0x02. */
8308FNIEMOP_DEF(iemOp_add_Gb_Eb)
8309{
8310 IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
8311 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
8312}
8313
8314
8315/** Opcode 0x03. */
8316FNIEMOP_DEF(iemOp_add_Gv_Ev)
8317{
8318 IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
8319 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
8320}
8321
8322
8323/** Opcode 0x04. */
8324FNIEMOP_DEF(iemOp_add_Al_Ib)
8325{
8326 IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
8327 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
8328}
8329
8330
8331/** Opcode 0x05. */
8332FNIEMOP_DEF(iemOp_add_eAX_Iz)
8333{
8334 IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
8335 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
8336}
8337
8338
8339/** Opcode 0x06. */
8340FNIEMOP_DEF(iemOp_push_ES)
8341{
8342 IEMOP_MNEMONIC(push_es, "push es");
8343 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
8344}
8345
8346
8347/** Opcode 0x07. */
8348FNIEMOP_DEF(iemOp_pop_ES)
8349{
8350 IEMOP_MNEMONIC(pop_es, "pop es");
8351 IEMOP_HLP_NO_64BIT();
8352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8353 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
8354}
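
/* Note: popping ES, SS or DS raises #UD in 64-bit mode, hence the
   IEMOP_HLP_NO_64BIT() in this and the other segment pop handlers. */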
8355
8356
8357/** Opcode 0x08. */
8358FNIEMOP_DEF(iemOp_or_Eb_Gb)
8359{
8360 IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
8361 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8362 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
8363}
8364
8365
8366/** Opcode 0x09. */
8367FNIEMOP_DEF(iemOp_or_Ev_Gv)
8368{
8369 IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
8370 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8371 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
8372}
8373
8374
8375/** Opcode 0x0a. */
8376FNIEMOP_DEF(iemOp_or_Gb_Eb)
8377{
8378 IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
8379 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8380 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
8381}
8382
8383
8384/** Opcode 0x0b. */
8385FNIEMOP_DEF(iemOp_or_Gv_Ev)
8386{
8387 IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
8388 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8389 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
8390}
8391
8392
8393/** Opcode 0x0c. */
8394FNIEMOP_DEF(iemOp_or_Al_Ib)
8395{
8396 IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
8397 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8398 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
8399}
8400
8401
8402/** Opcode 0x0d. */
8403FNIEMOP_DEF(iemOp_or_eAX_Iz)
8404{
8405 IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
8406 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8407 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
8408}
8409
8410
8411/** Opcode 0x0e. */
8412FNIEMOP_DEF(iemOp_push_CS)
8413{
8414 IEMOP_MNEMONIC(push_cs, "push cs");
8415 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
8416}
8417
8418
8419/** Opcode 0x0f. */
8420FNIEMOP_DEF(iemOp_2byteEscape)
8421{
8422#ifdef VBOX_STRICT
8423 static bool s_fTested = false;
8424 if (RT_LIKELY(s_fTested)) { /* likely */ }
8425 else
8426 {
8427 s_fTested = true;
8428 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
8429 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
8430 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
8431 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
8432 }
8433#endif
8434
8435 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8436
8437 /** @todo PUSH CS on 8086, undefined on 80186. */
8438 IEMOP_HLP_MIN_286();
8439 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
8440}
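
/*
 * Worked example of the indexing above, using the layout verified by the
 * VBOX_STRICT self-test: decoding F3 0F BC /r (tzcnt) arrives here with
 * idxPrefix == 2, so we call g_apfnTwoByteMap[0xbc * 4 + 2], i.e.
 * iemOp_tzcnt_Gv_Ev, while the other three slots of 0xbc decode as bsf.
 */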
8441
8442/** Opcode 0x10. */
8443FNIEMOP_DEF(iemOp_adc_Eb_Gb)
8444{
8445 IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
8446 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
8447}
8448
8449
8450/** Opcode 0x11. */
8451FNIEMOP_DEF(iemOp_adc_Ev_Gv)
8452{
8453 IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
8454 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
8455}
8456
8457
8458/** Opcode 0x12. */
8459FNIEMOP_DEF(iemOp_adc_Gb_Eb)
8460{
8461 IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
8462 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
8463}
8464
8465
8466/** Opcode 0x13. */
8467FNIEMOP_DEF(iemOp_adc_Gv_Ev)
8468{
8469 IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
8470 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
8471}
8472
8473
8474/** Opcode 0x14. */
8475FNIEMOP_DEF(iemOp_adc_Al_Ib)
8476{
8477 IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
8478 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
8479}
8480
8481
8482/** Opcode 0x15. */
8483FNIEMOP_DEF(iemOp_adc_eAX_Iz)
8484{
8485 IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
8486 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
8487}
8488
8489
8490/** Opcode 0x16. */
8491FNIEMOP_DEF(iemOp_push_SS)
8492{
8493 IEMOP_MNEMONIC(push_ss, "push ss");
8494 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
8495}
8496
8497
8498/** Opcode 0x17. */
8499FNIEMOP_DEF(iemOp_pop_SS)
8500{
8501 IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
8502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8503 IEMOP_HLP_NO_64BIT();
8504 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
8505}
8506
8507
8508/** Opcode 0x18. */
8509FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
8510{
8511 IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
8512 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
8513}
8514
8515
8516/** Opcode 0x19. */
8517FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
8518{
8519 IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
8520 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
8521}
8522
8523
8524/** Opcode 0x1a. */
8525FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
8526{
8527 IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
8528 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
8529}
8530
8531
8532/** Opcode 0x1b. */
8533FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
8534{
8535 IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
8536 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
8537}
8538
8539
8540/** Opcode 0x1c. */
8541FNIEMOP_DEF(iemOp_sbb_Al_Ib)
8542{
8543 IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
8544 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
8545}
8546
8547
8548/** Opcode 0x1d. */
8549FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
8550{
8551 IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
8552 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
8553}
8554
8555
8556/** Opcode 0x1e. */
8557FNIEMOP_DEF(iemOp_push_DS)
8558{
8559 IEMOP_MNEMONIC(push_ds, "push ds");
8560 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
8561}
8562
8563
8564/** Opcode 0x1f. */
8565FNIEMOP_DEF(iemOp_pop_DS)
8566{
8567 IEMOP_MNEMONIC(pop_ds, "pop ds");
8568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8569 IEMOP_HLP_NO_64BIT();
8570 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
8571}
8572
8573
8574/** Opcode 0x20. */
8575FNIEMOP_DEF(iemOp_and_Eb_Gb)
8576{
8577 IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
8578 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8579 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
8580}
8581
8582
8583/** Opcode 0x21. */
8584FNIEMOP_DEF(iemOp_and_Ev_Gv)
8585{
8586 IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
8587 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8588 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
8589}
8590
8591
8592/** Opcode 0x22. */
8593FNIEMOP_DEF(iemOp_and_Gb_Eb)
8594{
8595 IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
8596 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8597 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
8598}
8599
8600
8601/** Opcode 0x23. */
8602FNIEMOP_DEF(iemOp_and_Gv_Ev)
8603{
8604 IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
8605 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8606 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
8607}
8608
8609
8610/** Opcode 0x24. */
8611FNIEMOP_DEF(iemOp_and_Al_Ib)
8612{
8613 IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
8614 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8615 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
8616}
8617
8618
8619/** Opcode 0x25. */
8620FNIEMOP_DEF(iemOp_and_eAX_Iz)
8621{
8622 IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
8623 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8624 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
8625}
8626
8627
8628/** Opcode 0x26. */
8629FNIEMOP_DEF(iemOp_seg_ES)
8630{
8631 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
8632 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
8633 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
8634
8635 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8636 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8637}
8638
8639
8640/** Opcode 0x27. */
8641FNIEMOP_DEF(iemOp_daa)
8642{
8643 IEMOP_MNEMONIC(daa_AL, "daa AL");
8644 IEMOP_HLP_NO_64BIT();
8645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8646 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8647 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
8648}
8649
8650
8651/** Opcode 0x28. */
8652FNIEMOP_DEF(iemOp_sub_Eb_Gb)
8653{
8654 IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
8655 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
8656}
8657
8658
8659/** Opcode 0x29. */
8660FNIEMOP_DEF(iemOp_sub_Ev_Gv)
8661{
8662 IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
8663 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
8664}
8665
8666
8667/** Opcode 0x2a. */
8668FNIEMOP_DEF(iemOp_sub_Gb_Eb)
8669{
8670 IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
8671 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
8672}
8673
8674
8675/** Opcode 0x2b. */
8676FNIEMOP_DEF(iemOp_sub_Gv_Ev)
8677{
8678 IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
8679 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
8680}
8681
8682
8683/** Opcode 0x2c. */
8684FNIEMOP_DEF(iemOp_sub_Al_Ib)
8685{
8686 IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
8687 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
8688}
8689
8690
8691/** Opcode 0x2d. */
8692FNIEMOP_DEF(iemOp_sub_eAX_Iz)
8693{
8694 IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
8695 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
8696}
8697
8698
8699/** Opcode 0x2e. */
8700FNIEMOP_DEF(iemOp_seg_CS)
8701{
8702 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
8703 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
8704 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
8705
8706 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8707 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8708}
8709
8710
8711/** Opcode 0x2f. */
8712FNIEMOP_DEF(iemOp_das)
8713{
8714 IEMOP_MNEMONIC(das_AL, "das AL");
8715 IEMOP_HLP_NO_64BIT();
8716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8717 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8718 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
8719}
8720
8721
8722/** Opcode 0x30. */
8723FNIEMOP_DEF(iemOp_xor_Eb_Gb)
8724{
8725 IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
8726 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8727 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
8728}
8729
8730
8731/** Opcode 0x31. */
8732FNIEMOP_DEF(iemOp_xor_Ev_Gv)
8733{
8734 IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
8735 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8736 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
8737}
8738
8739
8740/** Opcode 0x32. */
8741FNIEMOP_DEF(iemOp_xor_Gb_Eb)
8742{
8743 IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
8744 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8745 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
8746}
8747
8748
8749/** Opcode 0x33. */
8750FNIEMOP_DEF(iemOp_xor_Gv_Ev)
8751{
8752 IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
8753 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8754 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
8755}
8756
8757
8758/** Opcode 0x34. */
8759FNIEMOP_DEF(iemOp_xor_Al_Ib)
8760{
8761 IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
8762 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8763 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
8764}
8765
8766
8767/** Opcode 0x35. */
8768FNIEMOP_DEF(iemOp_xor_eAX_Iz)
8769{
8770 IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
8771 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8772 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
8773}
8774
8775
8776/** Opcode 0x36. */
8777FNIEMOP_DEF(iemOp_seg_SS)
8778{
8779 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
8780 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
8781 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
8782
8783 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8784 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8785}
8786
8787
8788/** Opcode 0x37. */
8789FNIEMOP_STUB(iemOp_aaa);
8790
8791
8792/** Opcode 0x38. */
8793FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
8794{
8795 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
8796 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
8797}
8798
8799
8800/** Opcode 0x39. */
8801FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
8802{
8803 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
8804 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
8805}
8806
8807
8808/** Opcode 0x3a. */
8809FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
8810{
8811 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
8812 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
8813}
8814
8815
8816/** Opcode 0x3b. */
8817FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
8818{
8819 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
8820 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
8821}
8822
8823
8824/** Opcode 0x3c. */
8825FNIEMOP_DEF(iemOp_cmp_Al_Ib)
8826{
8827 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
8828 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
8829}
8830
8831
8832/** Opcode 0x3d. */
8833FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
8834{
8835 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
8836 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
8837}
8838
8839
8840/** Opcode 0x3e. */
8841FNIEMOP_DEF(iemOp_seg_DS)
8842{
8843 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
8844 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
8845 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
8846
8847 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8848 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8849}
8850
8851
8852/** Opcode 0x3f. */
8853FNIEMOP_STUB(iemOp_aas);
8854
8855/**
8856 * Common 'inc/dec/not/neg register' helper.
8857 */
8858FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
8859{
8860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8861 switch (pVCpu->iem.s.enmEffOpSize)
8862 {
8863 case IEMMODE_16BIT:
8864 IEM_MC_BEGIN(2, 0);
8865 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8866 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8867 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8868 IEM_MC_REF_EFLAGS(pEFlags);
8869 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
8870 IEM_MC_ADVANCE_RIP();
8871 IEM_MC_END();
8872 return VINF_SUCCESS;
8873
8874 case IEMMODE_32BIT:
8875 IEM_MC_BEGIN(2, 0);
8876 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8877 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8878 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8879 IEM_MC_REF_EFLAGS(pEFlags);
8880 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
8881 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8882 IEM_MC_ADVANCE_RIP();
8883 IEM_MC_END();
8884 return VINF_SUCCESS;
8885
8886 case IEMMODE_64BIT:
8887 IEM_MC_BEGIN(2, 0);
8888 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8889 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8890 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8891 IEM_MC_REF_EFLAGS(pEFlags);
8892 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
8893 IEM_MC_ADVANCE_RIP();
8894 IEM_MC_END();
8895 return VINF_SUCCESS;
8896 }
8897 return VINF_SUCCESS;
8898}
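
/* Note the asymmetry above: the 32-bit case must explicitly clear the high
   half of the 64-bit register (IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF) because a
   32-bit operand size zero extends on AMD64, whereas 16-bit writes leave
   the upper bits untouched. */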
8899
8900
8901/** Opcode 0x40. */
8902FNIEMOP_DEF(iemOp_inc_eAX)
8903{
8904 /*
8905 * This is a REX prefix in 64-bit mode.
8906 */
8907 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8908 {
8909 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
8910 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
8911
8912 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8913 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8914 }
8915
8916 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
8917 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
8918}
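
/* Since the whole 0x40..0x4f row is eaten as REX prefixes in 64-bit mode,
   INC/DEC on a register must be encoded there via the ModRM forms FF /0
   and FF /1 instead. */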
8919
8920
8921/** Opcode 0x41. */
8922FNIEMOP_DEF(iemOp_inc_eCX)
8923{
8924 /*
8925 * This is a REX prefix in 64-bit mode.
8926 */
8927 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8928 {
8929 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
8930 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
8931 pVCpu->iem.s.uRexB = 1 << 3;
8932
8933 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8934 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8935 }
8936
8937 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
8938 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
8939}
8940
8941
8942/** Opcode 0x42. */
8943FNIEMOP_DEF(iemOp_inc_eDX)
8944{
8945 /*
8946 * This is a REX prefix in 64-bit mode.
8947 */
8948 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8949 {
8950 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
8951 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
8952 pVCpu->iem.s.uRexIndex = 1 << 3;
8953
8954 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8955 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8956 }
8957
8958 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
8959 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
8960}
8961
8962
8963
8964/** Opcode 0x43. */
8965FNIEMOP_DEF(iemOp_inc_eBX)
8966{
8967 /*
8968 * This is a REX prefix in 64-bit mode.
8969 */
8970 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8971 {
8972 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
8973 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8974 pVCpu->iem.s.uRexB = 1 << 3;
8975 pVCpu->iem.s.uRexIndex = 1 << 3;
8976
8977 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8978 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8979 }
8980
8981 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
8982 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
8983}
8984
8985
8986/** Opcode 0x44. */
8987FNIEMOP_DEF(iemOp_inc_eSP)
8988{
8989 /*
8990 * This is a REX prefix in 64-bit mode.
8991 */
8992 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8993 {
8994 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
8995 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
8996 pVCpu->iem.s.uRexReg = 1 << 3;
8997
8998 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8999 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9000 }
9001
9002 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
9003 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
9004}
9005
9006
9007/** Opcode 0x45. */
9008FNIEMOP_DEF(iemOp_inc_eBP)
9009{
9010 /*
9011 * This is a REX prefix in 64-bit mode.
9012 */
9013 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9014 {
9015 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
9016 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
9017 pVCpu->iem.s.uRexReg = 1 << 3;
9018 pVCpu->iem.s.uRexB = 1 << 3;
9019
9020 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9021 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9022 }
9023
9024 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
9025 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
9026}
9027
9028
9029/** Opcode 0x46. */
9030FNIEMOP_DEF(iemOp_inc_eSI)
9031{
9032 /*
9033 * This is a REX prefix in 64-bit mode.
9034 */
9035 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9036 {
9037 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
9038 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
9039 pVCpu->iem.s.uRexReg = 1 << 3;
9040 pVCpu->iem.s.uRexIndex = 1 << 3;
9041
9042 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9043 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9044 }
9045
9046 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
9047 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
9048}
9049
9050
9051/** Opcode 0x47. */
9052FNIEMOP_DEF(iemOp_inc_eDI)
9053{
9054 /*
9055 * This is a REX prefix in 64-bit mode.
9056 */
9057 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9058 {
9059 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
9060 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
9061 pVCpu->iem.s.uRexReg = 1 << 3;
9062 pVCpu->iem.s.uRexB = 1 << 3;
9063 pVCpu->iem.s.uRexIndex = 1 << 3;
9064
9065 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9066 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9067 }
9068
9069 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
9070 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
9071}
9072
9073
9074/** Opcode 0x48. */
9075FNIEMOP_DEF(iemOp_dec_eAX)
9076{
9077 /*
9078 * This is a REX prefix in 64-bit mode.
9079 */
9080 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9081 {
9082 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
9083 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
9084 iemRecalEffOpSize(pVCpu);
9085
9086 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9087 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9088 }
9089
9090 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
9091 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
9092}
9093
9094
9095/** Opcode 0x49. */
9096FNIEMOP_DEF(iemOp_dec_eCX)
9097{
9098 /*
9099 * This is a REX prefix in 64-bit mode.
9100 */
9101 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9102 {
9103 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
9104 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
9105 pVCpu->iem.s.uRexB = 1 << 3;
9106 iemRecalEffOpSize(pVCpu);
9107
9108 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9109 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9110 }
9111
9112 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
9113 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
9114}
9115
9116
9117/** Opcode 0x4a. */
9118FNIEMOP_DEF(iemOp_dec_eDX)
9119{
9120 /*
9121 * This is a REX prefix in 64-bit mode.
9122 */
9123 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9124 {
9125 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
9126 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9127 pVCpu->iem.s.uRexIndex = 1 << 3;
9128 iemRecalEffOpSize(pVCpu);
9129
9130 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9131 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9132 }
9133
9134 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
9135 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
9136}
9137
9138
9139/** Opcode 0x4b. */
9140FNIEMOP_DEF(iemOp_dec_eBX)
9141{
9142 /*
9143 * This is a REX prefix in 64-bit mode.
9144 */
9145 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9146 {
9147 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
9148 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9149 pVCpu->iem.s.uRexB = 1 << 3;
9150 pVCpu->iem.s.uRexIndex = 1 << 3;
9151 iemRecalEffOpSize(pVCpu);
9152
9153 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9154 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9155 }
9156
9157 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
9158 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
9159}
9160
9161
9162/** Opcode 0x4c. */
9163FNIEMOP_DEF(iemOp_dec_eSP)
9164{
9165 /*
9166 * This is a REX prefix in 64-bit mode.
9167 */
9168 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9169 {
9170 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
9171 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
9172 pVCpu->iem.s.uRexReg = 1 << 3;
9173 iemRecalEffOpSize(pVCpu);
9174
9175 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9176 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9177 }
9178
9179 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
9180 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
9181}
9182
9183
9184/** Opcode 0x4d. */
9185FNIEMOP_DEF(iemOp_dec_eBP)
9186{
9187 /*
9188 * This is a REX prefix in 64-bit mode.
9189 */
9190 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9191 {
9192 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
9193 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
9194 pVCpu->iem.s.uRexReg = 1 << 3;
9195 pVCpu->iem.s.uRexB = 1 << 3;
9196 iemRecalEffOpSize(pVCpu);
9197
9198 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9199 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9200 }
9201
9202 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
9203 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
9204}
9205
9206
9207/** Opcode 0x4e. */
9208FNIEMOP_DEF(iemOp_dec_eSI)
9209{
9210 /*
9211 * This is a REX prefix in 64-bit mode.
9212 */
9213 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9214 {
9215 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
9216 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9217 pVCpu->iem.s.uRexReg = 1 << 3;
9218 pVCpu->iem.s.uRexIndex = 1 << 3;
9219 iemRecalEffOpSize(pVCpu);
9220
9221 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9222 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9223 }
9224
9225 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
9226 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
9227}
9228
9229
9230/** Opcode 0x4f. */
9231FNIEMOP_DEF(iemOp_dec_eDI)
9232{
9233 /*
9234 * This is a REX prefix in 64-bit mode.
9235 */
9236 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9237 {
9238 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
9239 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9240 pVCpu->iem.s.uRexReg = 1 << 3;
9241 pVCpu->iem.s.uRexB = 1 << 3;
9242 pVCpu->iem.s.uRexIndex = 1 << 3;
9243 iemRecalEffOpSize(pVCpu);
9244
9245 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9246 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9247 }
9248
9249 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
9250 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
9251}
9252
9253
9254/**
9255 * Common 'push register' helper.
9256 */
9257FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
9258{
9259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9260 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9261 {
9262 iReg |= pVCpu->iem.s.uRexB;
9263 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9264 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9265 }
9266
9267 switch (pVCpu->iem.s.enmEffOpSize)
9268 {
9269 case IEMMODE_16BIT:
9270 IEM_MC_BEGIN(0, 1);
9271 IEM_MC_LOCAL(uint16_t, u16Value);
9272 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
9273 IEM_MC_PUSH_U16(u16Value);
9274 IEM_MC_ADVANCE_RIP();
9275 IEM_MC_END();
9276 break;
9277
9278 case IEMMODE_32BIT:
9279 IEM_MC_BEGIN(0, 1);
9280 IEM_MC_LOCAL(uint32_t, u32Value);
9281 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
9282 IEM_MC_PUSH_U32(u32Value);
9283 IEM_MC_ADVANCE_RIP();
9284 IEM_MC_END();
9285 break;
9286
9287 case IEMMODE_64BIT:
9288 IEM_MC_BEGIN(0, 1);
9289 IEM_MC_LOCAL(uint64_t, u64Value);
9290 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
9291 IEM_MC_PUSH_U64(u64Value);
9292 IEM_MC_ADVANCE_RIP();
9293 IEM_MC_END();
9294 break;
9295 }
9296
9297 return VINF_SUCCESS;
9298}
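
/* Stack pushes default to a 64-bit operand size in long mode and cannot be
   forced down to 32 bits; the only override is 0x66 for a 16-bit push,
   which is exactly what the operand size recalculation at the top of this
   helper models. */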
9299
9300
9301/** Opcode 0x50. */
9302FNIEMOP_DEF(iemOp_push_eAX)
9303{
9304 IEMOP_MNEMONIC(push_rAX, "push rAX");
9305 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
9306}
9307
9308
9309/** Opcode 0x51. */
9310FNIEMOP_DEF(iemOp_push_eCX)
9311{
9312 IEMOP_MNEMONIC(push_rCX, "push rCX");
9313 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
9314}
9315
9316
9317/** Opcode 0x52. */
9318FNIEMOP_DEF(iemOp_push_eDX)
9319{
9320 IEMOP_MNEMONIC(push_rDX, "push rDX");
9321 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
9322}
9323
9324
9325/** Opcode 0x53. */
9326FNIEMOP_DEF(iemOp_push_eBX)
9327{
9328 IEMOP_MNEMONIC(push_rBX, "push rBX");
9329 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
9330}
9331
9332
9333/** Opcode 0x54. */
9334FNIEMOP_DEF(iemOp_push_eSP)
9335{
9336 IEMOP_MNEMONIC(push_rSP, "push rSP");
9337 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
9338 {
9339 IEM_MC_BEGIN(0, 1);
9340 IEM_MC_LOCAL(uint16_t, u16Value);
9341 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
9342 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
9343 IEM_MC_PUSH_U16(u16Value);
9344 IEM_MC_ADVANCE_RIP();
9345 IEM_MC_END();
9346 }
9347 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
9348}
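
/* The 8086/8088 quirk modelled above: on those CPUs PUSH SP stores the
   already decremented value (SP - 2), while the 80286 and later store the
   value SP had before the push; hence the IEM_MC_SUB_LOCAL_U16 in the
   8086-only path. */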
9349
9350
9351/** Opcode 0x55. */
9352FNIEMOP_DEF(iemOp_push_eBP)
9353{
9354 IEMOP_MNEMONIC(push_rBP, "push rBP");
9355 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
9356}
9357
9358
9359/** Opcode 0x56. */
9360FNIEMOP_DEF(iemOp_push_eSI)
9361{
9362 IEMOP_MNEMONIC(push_rSI, "push rSI");
9363 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
9364}
9365
9366
9367/** Opcode 0x57. */
9368FNIEMOP_DEF(iemOp_push_eDI)
9369{
9370 IEMOP_MNEMONIC(push_rDI, "push rDI");
9371 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
9372}
9373
9374
9375/**
9376 * Common 'pop register' helper.
9377 */
9378FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
9379{
9380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9381 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9382 {
9383 iReg |= pVCpu->iem.s.uRexB;
9384 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9385 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9386 }
9387
9388 switch (pVCpu->iem.s.enmEffOpSize)
9389 {
9390 case IEMMODE_16BIT:
9391 IEM_MC_BEGIN(0, 1);
9392 IEM_MC_LOCAL(uint16_t *, pu16Dst);
9393 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
9394 IEM_MC_POP_U16(pu16Dst);
9395 IEM_MC_ADVANCE_RIP();
9396 IEM_MC_END();
9397 break;
9398
9399 case IEMMODE_32BIT:
9400 IEM_MC_BEGIN(0, 1);
9401 IEM_MC_LOCAL(uint32_t *, pu32Dst);
9402 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9403 IEM_MC_POP_U32(pu32Dst);
9404 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase */
9405 IEM_MC_ADVANCE_RIP();
9406 IEM_MC_END();
9407 break;
9408
9409 case IEMMODE_64BIT:
9410 IEM_MC_BEGIN(0, 1);
9411 IEM_MC_LOCAL(uint64_t *, pu64Dst);
9412 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9413 IEM_MC_POP_U64(pu64Dst);
9414 IEM_MC_ADVANCE_RIP();
9415 IEM_MC_END();
9416 break;
9417 }
9418
9419 return VINF_SUCCESS;
9420}
9421
9422
9423/** Opcode 0x58. */
9424FNIEMOP_DEF(iemOp_pop_eAX)
9425{
9426 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
9427 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
9428}
9429
9430
9431/** Opcode 0x59. */
9432FNIEMOP_DEF(iemOp_pop_eCX)
9433{
9434 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
9435 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
9436}
9437
9438
9439/** Opcode 0x5a. */
9440FNIEMOP_DEF(iemOp_pop_eDX)
9441{
9442 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
9443 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
9444}
9445
9446
9447/** Opcode 0x5b. */
9448FNIEMOP_DEF(iemOp_pop_eBX)
9449{
9450 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
9451 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
9452}
9453
9454
9455/** Opcode 0x5c. */
9456FNIEMOP_DEF(iemOp_pop_eSP)
9457{
9458 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
9459 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9460 {
9461 if (pVCpu->iem.s.uRexB)
9462 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
9463 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9464 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9465 }
9466
9467 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
9468 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
9469 /** @todo add testcase for this instruction. */
9470 switch (pVCpu->iem.s.enmEffOpSize)
9471 {
9472 case IEMMODE_16BIT:
9473 IEM_MC_BEGIN(0, 1);
9474 IEM_MC_LOCAL(uint16_t, u16Dst);
9475 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
9476 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
9477 IEM_MC_ADVANCE_RIP();
9478 IEM_MC_END();
9479 break;
9480
9481 case IEMMODE_32BIT:
9482 IEM_MC_BEGIN(0, 1);
9483 IEM_MC_LOCAL(uint32_t, u32Dst);
9484 IEM_MC_POP_U32(&u32Dst);
9485 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
9486 IEM_MC_ADVANCE_RIP();
9487 IEM_MC_END();
9488 break;
9489
9490 case IEMMODE_64BIT:
9491 IEM_MC_BEGIN(0, 1);
9492 IEM_MC_LOCAL(uint64_t, u64Dst);
9493 IEM_MC_POP_U64(&u64Dst);
9494 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
9495 IEM_MC_ADVANCE_RIP();
9496 IEM_MC_END();
9497 break;
9498 }
9499
9500 return VINF_SUCCESS;
9501}
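
/* POP rSP gets its own path instead of iemOpCommonPopGReg because rSP is
   incremented before the popped value is stored: the value read from the
   old top of stack simply becomes the new rSP. */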
9502
9503
9504/** Opcode 0x5d. */
9505FNIEMOP_DEF(iemOp_pop_eBP)
9506{
9507 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
9508 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
9509}
9510
9511
9512/** Opcode 0x5e. */
9513FNIEMOP_DEF(iemOp_pop_eSI)
9514{
9515 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
9516 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
9517}
9518
9519
9520/** Opcode 0x5f. */
9521FNIEMOP_DEF(iemOp_pop_eDI)
9522{
9523 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
9524 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
9525}
9526
9527
9528/** Opcode 0x60. */
9529FNIEMOP_DEF(iemOp_pusha)
9530{
9531 IEMOP_MNEMONIC(pusha, "pusha");
9532 IEMOP_HLP_MIN_186();
9533 IEMOP_HLP_NO_64BIT();
9534 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
9535 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
9536 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
9537 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
9538}
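
/* PUSHA stores the eight GPRs in the order AX, CX, DX, BX, SP, BP, SI, DI,
   with SP recorded as it was before the first push; POPA below restores
   them in reverse order but discards the saved SP value. */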
9539
9540
9541/** Opcode 0x61. */
9542FNIEMOP_DEF(iemOp_popa)
9543{
9544 IEMOP_MNEMONIC(popa, "popa");
9545 IEMOP_HLP_MIN_186();
9546 IEMOP_HLP_NO_64BIT();
9547 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
9548 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
9549 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
9550 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
9551}
9552
9553
9554/** Opcode 0x62. */
9555FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
9556// IEMOP_HLP_MIN_186();
9557
9558
9559/** Opcode 0x63 - non-64-bit modes. */
9560FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
9561{
9562 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
9563 IEMOP_HLP_MIN_286();
9564 IEMOP_HLP_NO_REAL_OR_V86_MODE();
9565 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9566
9567 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9568 {
9569 /* Register */
9570 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
9571 IEM_MC_BEGIN(3, 0);
9572 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9573 IEM_MC_ARG(uint16_t, u16Src, 1);
9574 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9575
9576 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9577 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
9578 IEM_MC_REF_EFLAGS(pEFlags);
9579 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
9580
9581 IEM_MC_ADVANCE_RIP();
9582 IEM_MC_END();
9583 }
9584 else
9585 {
9586 /* Memory */
9587 IEM_MC_BEGIN(3, 2);
9588 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9589 IEM_MC_ARG(uint16_t, u16Src, 1);
9590 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9592
9593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9594 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
9595 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9596 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9597 IEM_MC_FETCH_EFLAGS(EFlags);
9598 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
9599
9600 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9601 IEM_MC_COMMIT_EFLAGS(EFlags);
9602 IEM_MC_ADVANCE_RIP();
9603 IEM_MC_END();
9604 }
9605 return VINF_SUCCESS;
9606
9607}
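
/* ARPL recap: if the RPL bits (1:0) of the destination selector are lower
   than those of the source, they are raised to the source RPL and ZF is
   set; otherwise ZF is cleared and the destination is left untouched. ZF
   is the only flag affected. */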
9608
9609
9610/** Opcode 0x63.
9611 * @note This is a weird one. It works like a regular move instruction if
9612 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
9613 * @todo This definitely needs a testcase to verify the odd cases. */
9614FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
9615{
9616 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
9617
9618 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
9619 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9620
9621 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9622 {
9623 /*
9624 * Register to register.
9625 */
9626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9627 IEM_MC_BEGIN(0, 1);
9628 IEM_MC_LOCAL(uint64_t, u64Value);
9629 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9630 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9631 IEM_MC_ADVANCE_RIP();
9632 IEM_MC_END();
9633 }
9634 else
9635 {
9636 /*
9637 * We're loading a register from memory.
9638 */
9639 IEM_MC_BEGIN(0, 2);
9640 IEM_MC_LOCAL(uint64_t, u64Value);
9641 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9642 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9644 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9645 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9646 IEM_MC_ADVANCE_RIP();
9647 IEM_MC_END();
9648 }
9649 return VINF_SUCCESS;
9650}
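
/* Illustrative encodings for the above, assuming the usual REX rules:
 *      48 63 c1    movsxd rax, ecx   ; rax = (int64_t)(int32_t)ecx
 *      63 c1       movsxd eax, ecx   ; no REX.W: behaves like a plain
 *                                      32-bit move per the AMD note above
 */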
9651
9652
9653/** Opcode 0x64. */
9654FNIEMOP_DEF(iemOp_seg_FS)
9655{
9656 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
9657 IEMOP_HLP_MIN_386();
9658
9659 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
9660 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
9661
9662 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9663 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9664}
9665
9666
9667/** Opcode 0x65. */
9668FNIEMOP_DEF(iemOp_seg_GS)
9669{
9670 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
9671 IEMOP_HLP_MIN_386();
9672
9673 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
9674 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
9675
9676 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9677 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9678}
9679
9680
9681/** Opcode 0x66. */
9682FNIEMOP_DEF(iemOp_op_size)
9683{
9684 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
9685 IEMOP_HLP_MIN_386();
9686
9687 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
9688 iemRecalEffOpSize(pVCpu);
9689
9690 /* For the 4 entry opcode tables, the operand prefix doesn't count
9691 when REPZ or REPNZ are present. */
9692 if (pVCpu->iem.s.idxPrefix == 0)
9693 pVCpu->iem.s.idxPrefix = 1;
9694
9695 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9696 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9697}
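
/* Consequence for the four-column opcode tables: a 0x66 that follows an
   F3/F2 prefix still sets IEM_OP_PRF_SIZE_OP and flips the operand size,
   but idxPrefix keeps the REPZ/REPNZ value, so e.g. F3 66 0F BC should
   still select the tzcnt column (assuming the F3 handler sets idxPrefix
   to 2). */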
9698
9699
9700/** Opcode 0x67. */
9701FNIEMOP_DEF(iemOp_addr_size)
9702{
9703 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
9704 IEMOP_HLP_MIN_386();
9705
9706 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
9707 switch (pVCpu->iem.s.enmDefAddrMode)
9708 {
9709 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9710 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
9711 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9712 default: AssertFailed();
9713 }
9714
9715 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9716 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9717}
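
/* The 0x67 prefix simply toggles to the alternate address size of the
   current mode; note that in 64-bit mode it selects 32-bit addressing,
   since 16-bit effective addressing does not exist there. */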
9718
9719
9720/** Opcode 0x68. */
9721FNIEMOP_DEF(iemOp_push_Iz)
9722{
9723 IEMOP_MNEMONIC(push_Iz, "push Iz");
9724 IEMOP_HLP_MIN_186();
9725 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9726 switch (pVCpu->iem.s.enmEffOpSize)
9727 {
9728 case IEMMODE_16BIT:
9729 {
9730 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9732 IEM_MC_BEGIN(0,0);
9733 IEM_MC_PUSH_U16(u16Imm);
9734 IEM_MC_ADVANCE_RIP();
9735 IEM_MC_END();
9736 return VINF_SUCCESS;
9737 }
9738
9739 case IEMMODE_32BIT:
9740 {
9741 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9743 IEM_MC_BEGIN(0,0);
9744 IEM_MC_PUSH_U32(u32Imm);
9745 IEM_MC_ADVANCE_RIP();
9746 IEM_MC_END();
9747 return VINF_SUCCESS;
9748 }
9749
9750 case IEMMODE_64BIT:
9751 {
9752 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9754 IEM_MC_BEGIN(0,0);
9755 IEM_MC_PUSH_U64(u64Imm);
9756 IEM_MC_ADVANCE_RIP();
9757 IEM_MC_END();
9758 return VINF_SUCCESS;
9759 }
9760
9761 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9762 }
9763}
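
/* Note: there is no 64-bit immediate form of PUSH; in long mode Iz is a
   32-bit immediate sign extended to 64 bits (IEM_OPCODE_GET_NEXT_S32_SX_U64)
   before being pushed. */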
9764
9765
9766/** Opcode 0x69. */
9767FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
9768{
9769 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
9770 IEMOP_HLP_MIN_186();
9771 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9772 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9773
9774 switch (pVCpu->iem.s.enmEffOpSize)
9775 {
9776 case IEMMODE_16BIT:
9777 {
9778 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9779 {
9780 /* register operand */
9781 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9783
9784 IEM_MC_BEGIN(3, 1);
9785 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9786 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
9787 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9788 IEM_MC_LOCAL(uint16_t, u16Tmp);
9789
9790 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9791 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9792 IEM_MC_REF_EFLAGS(pEFlags);
9793 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9794 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9795
9796 IEM_MC_ADVANCE_RIP();
9797 IEM_MC_END();
9798 }
9799 else
9800 {
9801 /* memory operand */
9802 IEM_MC_BEGIN(3, 2);
9803 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9804 IEM_MC_ARG(uint16_t, u16Src, 1);
9805 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9806 IEM_MC_LOCAL(uint16_t, u16Tmp);
9807 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9808
9809 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9810 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9811 IEM_MC_ASSIGN(u16Src, u16Imm);
9812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9813 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9814 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9815 IEM_MC_REF_EFLAGS(pEFlags);
9816 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9817 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9818
9819 IEM_MC_ADVANCE_RIP();
9820 IEM_MC_END();
9821 }
9822 return VINF_SUCCESS;
9823 }
9824
9825 case IEMMODE_32BIT:
9826 {
9827 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9828 {
9829 /* register operand */
9830 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9832
9833 IEM_MC_BEGIN(3, 1);
9834 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9835 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
9836 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9837 IEM_MC_LOCAL(uint32_t, u32Tmp);
9838
9839 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9840 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9841 IEM_MC_REF_EFLAGS(pEFlags);
9842 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9843 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9844
9845 IEM_MC_ADVANCE_RIP();
9846 IEM_MC_END();
9847 }
9848 else
9849 {
9850 /* memory operand */
9851 IEM_MC_BEGIN(3, 2);
9852 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9853 IEM_MC_ARG(uint32_t, u32Src, 1);
9854 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9855 IEM_MC_LOCAL(uint32_t, u32Tmp);
9856 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9857
9858 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9859 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9860 IEM_MC_ASSIGN(u32Src, u32Imm);
9861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9862 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9863 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9864 IEM_MC_REF_EFLAGS(pEFlags);
9865 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9866 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9867
9868 IEM_MC_ADVANCE_RIP();
9869 IEM_MC_END();
9870 }
9871 return VINF_SUCCESS;
9872 }
9873
9874 case IEMMODE_64BIT:
9875 {
9876 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9877 {
9878 /* register operand */
9879 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9881
9882 IEM_MC_BEGIN(3, 1);
9883 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9884 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
9885 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9886 IEM_MC_LOCAL(uint64_t, u64Tmp);
9887
9888 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9889 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9890 IEM_MC_REF_EFLAGS(pEFlags);
9891 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9892 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9893
9894 IEM_MC_ADVANCE_RIP();
9895 IEM_MC_END();
9896 }
9897 else
9898 {
9899 /* memory operand */
9900 IEM_MC_BEGIN(3, 2);
9901 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9902 IEM_MC_ARG(uint64_t, u64Src, 1);
9903 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9904 IEM_MC_LOCAL(uint64_t, u64Tmp);
9905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9906
9907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9908 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9909 IEM_MC_ASSIGN(u64Src, u64Imm);
9910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9911 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9912 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9913 IEM_MC_REF_EFLAGS(pEFlags);
9914 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9915 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9916
9917 IEM_MC_ADVANCE_RIP();
9918 IEM_MC_END();
9919 }
9920 return VINF_SUCCESS;
9921 }
9922 }
9923 AssertFailedReturn(VERR_IEM_IPE_9);
9924}
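
/* The two and three operand IMUL forms keep only the low half of the
   product; CF and OF are set when the full result did not fit, while SF,
   ZF, AF and PF are left undefined, matching the
   IEMOP_VERIFICATION_UNDEFINED_EFLAGS mask at the top of the function. */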
9925
9926
9927/** Opcode 0x6a. */
9928FNIEMOP_DEF(iemOp_push_Ib)
9929{
9930 IEMOP_MNEMONIC(push_Ib, "push Ib");
9931 IEMOP_HLP_MIN_186();
9932 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9934 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9935
9936 IEM_MC_BEGIN(0,0);
9937 switch (pVCpu->iem.s.enmEffOpSize)
9938 {
9939 case IEMMODE_16BIT:
9940 IEM_MC_PUSH_U16(i8Imm);
9941 break;
9942 case IEMMODE_32BIT:
9943 IEM_MC_PUSH_U32(i8Imm);
9944 break;
9945 case IEMMODE_64BIT:
9946 IEM_MC_PUSH_U64(i8Imm);
9947 break;
9948 }
9949 IEM_MC_ADVANCE_RIP();
9950 IEM_MC_END();
9951 return VINF_SUCCESS;
9952}
9953
9954
9955/** Opcode 0x6b. */
9956FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
9957{
9958 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
9959 IEMOP_HLP_MIN_186();
9960 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9961 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9962
9963 switch (pVCpu->iem.s.enmEffOpSize)
9964 {
9965 case IEMMODE_16BIT:
9966 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9967 {
9968 /* register operand */
9969 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9971
9972 IEM_MC_BEGIN(3, 1);
9973 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9974 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
9975 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9976 IEM_MC_LOCAL(uint16_t, u16Tmp);
9977
9978 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9979 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9980 IEM_MC_REF_EFLAGS(pEFlags);
9981 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9982 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9983
9984 IEM_MC_ADVANCE_RIP();
9985 IEM_MC_END();
9986 }
9987 else
9988 {
9989 /* memory operand */
9990 IEM_MC_BEGIN(3, 2);
9991 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9992 IEM_MC_ARG(uint16_t, u16Src, 1);
9993 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9994 IEM_MC_LOCAL(uint16_t, u16Tmp);
9995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9996
9997 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9998 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
9999 IEM_MC_ASSIGN(u16Src, u16Imm);
10000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10001 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10002 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
10003 IEM_MC_REF_EFLAGS(pEFlags);
10004 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
10005 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
10006
10007 IEM_MC_ADVANCE_RIP();
10008 IEM_MC_END();
10009 }
10010 return VINF_SUCCESS;
10011
10012 case IEMMODE_32BIT:
10013 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10014 {
10015 /* register operand */
10016 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10018
10019 IEM_MC_BEGIN(3, 1);
10020 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10021 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
10022 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10023 IEM_MC_LOCAL(uint32_t, u32Tmp);
10024
10025 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10026 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
10027 IEM_MC_REF_EFLAGS(pEFlags);
10028 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
10029 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
10030
10031 IEM_MC_ADVANCE_RIP();
10032 IEM_MC_END();
10033 }
10034 else
10035 {
10036 /* memory operand */
10037 IEM_MC_BEGIN(3, 2);
10038 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10039 IEM_MC_ARG(uint32_t, u32Src, 1);
10040 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10041 IEM_MC_LOCAL(uint32_t, u32Tmp);
10042 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10043
10044 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10045 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
10046 IEM_MC_ASSIGN(u32Src, u32Imm);
10047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10048 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10049 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
10050 IEM_MC_REF_EFLAGS(pEFlags);
10051 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
10052 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
10053
10054 IEM_MC_ADVANCE_RIP();
10055 IEM_MC_END();
10056 }
10057 return VINF_SUCCESS;
10058
10059 case IEMMODE_64BIT:
10060 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10061 {
10062 /* register operand */
10063 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10065
10066 IEM_MC_BEGIN(3, 1);
10067 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10068 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
10069 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10070 IEM_MC_LOCAL(uint64_t, u64Tmp);
10071
10072 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10073 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10074 IEM_MC_REF_EFLAGS(pEFlags);
10075 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10076 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10077
10078 IEM_MC_ADVANCE_RIP();
10079 IEM_MC_END();
10080 }
10081 else
10082 {
10083 /* memory operand */
10084 IEM_MC_BEGIN(3, 2);
10085 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10086 IEM_MC_ARG(uint64_t, u64Src, 1);
10087 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10088 IEM_MC_LOCAL(uint64_t, u64Tmp);
10089 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10090
10091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10092 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
10093 IEM_MC_ASSIGN(u64Src, u64Imm);
10094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10095 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10096 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10097 IEM_MC_REF_EFLAGS(pEFlags);
10098 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10099 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10100
10101 IEM_MC_ADVANCE_RIP();
10102 IEM_MC_END();
10103 }
10104 return VINF_SUCCESS;
10105 }
10106 AssertFailedReturn(VERR_IEM_IPE_8);
10107}
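
/* Same workers as for opcode 0x69; the difference is that Ib is sign
   extended to the effective operand size before the multiply (the
   (int8_t) casts and S8_SX fetchers above), so e.g. imul eax, ecx, 0xff
   multiplies by -1. */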
10108
10109
10110/** Opcode 0x6c. */
10111FNIEMOP_DEF(iemOp_insb_Yb_DX)
10112{
10113 IEMOP_HLP_MIN_186();
10114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10115 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10116 {
10117 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
10118 switch (pVCpu->iem.s.enmEffAddrMode)
10119 {
10120 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
10121 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
10122 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
10123 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10124 }
10125 }
10126 else
10127 {
10128 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
10129 switch (pVCpu->iem.s.enmEffAddrMode)
10130 {
10131 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
10132 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
10133 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
10134 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10135 }
10136 }
10137}
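
/* INS/OUTS are deferred to C implementations because they access I/O
   ports and can fault partway through a REP run; the trailing 'false'
   argument presumably tells the helper that the I/O permission checks
   have not been done yet. */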
10138
10139
10140/** Opcode 0x6d. */
10141FNIEMOP_DEF(iemOp_inswd_Yv_DX)
10142{
10143 IEMOP_HLP_MIN_186();
10144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10145 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
10146 {
10147 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
10148 switch (pVCpu->iem.s.enmEffOpSize)
10149 {
10150 case IEMMODE_16BIT:
10151 switch (pVCpu->iem.s.enmEffAddrMode)
10152 {
10153 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
10154 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
10155 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
10156 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10157 }
10158 break;
10159 case IEMMODE_64BIT:
10160 case IEMMODE_32BIT:
10161 switch (pVCpu->iem.s.enmEffAddrMode)
10162 {
10163 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
10164 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
10165 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
10166 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10167 }
10168 break;
10169 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10170 }
10171 }
10172 else
10173 {
10174 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
10175 switch (pVCpu->iem.s.enmEffOpSize)
10176 {
10177 case IEMMODE_16BIT:
10178 switch (pVCpu->iem.s.enmEffAddrMode)
10179 {
10180 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
10181 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
10182 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
10183 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10184 }
10185 break;
10186 case IEMMODE_64BIT:
10187 case IEMMODE_32BIT:
10188 switch (pVCpu->iem.s.enmEffAddrMode)
10189 {
10190 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
10191 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
10192 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
10193 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10194 }
10195 break;
10196 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10197 }
10198 }
10199}
10200
10201
10202/** Opcode 0x6e. */
10203FNIEMOP_DEF(iemOp_outsb_Yb_DX)
10204{
10205 IEMOP_HLP_MIN_186();
10206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10207 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10208 {
10209 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
10210 switch (pVCpu->iem.s.enmEffAddrMode)
10211 {
10212 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
10213 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
10214 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
10215 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10216 }
10217 }
10218 else
10219 {
10220 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
10221 switch (pVCpu->iem.s.enmEffAddrMode)
10222 {
10223 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
10224 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
10225 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
10226 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10227 }
10228 }
10229}
10230
10231
10232/** Opcode 0x6f. */
10233FNIEMOP_DEF(iemOp_outswd_Yv_DX)
10234{
10235 IEMOP_HLP_MIN_186();
10236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10237 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
10238 {
10239 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
10240 switch (pVCpu->iem.s.enmEffOpSize)
10241 {
10242 case IEMMODE_16BIT:
10243 switch (pVCpu->iem.s.enmEffAddrMode)
10244 {
10245 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
10246 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
10247 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
10248 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10249 }
10250 break;
10251 case IEMMODE_64BIT:
10252 case IEMMODE_32BIT:
10253 switch (pVCpu->iem.s.enmEffAddrMode)
10254 {
10255 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
10256 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
10257 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
10258 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10259 }
10260 break;
10261 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10262 }
10263 }
10264 else
10265 {
10266 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
10267 switch (pVCpu->iem.s.enmEffOpSize)
10268 {
10269 case IEMMODE_16BIT:
10270 switch (pVCpu->iem.s.enmEffAddrMode)
10271 {
10272 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
10273 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
10274 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
10275 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10276 }
10277 break;
10278 case IEMMODE_64BIT:
10279 case IEMMODE_32BIT:
10280 switch (pVCpu->iem.s.enmEffAddrMode)
10281 {
10282 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
10283 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
10284 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
10285 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10286 }
10287 break;
10288 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10289 }
10290 }
10291}
10292
10293
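/*
 * The Jcc Jb opcodes (0x70 thru 0x7f) below all follow the same pattern:
 * fetch the signed 8-bit displacement, test the relevant EFLAGS condition,
 * and either take the relative jump or just advance RIP.  The jump-if-clear
 * forms simply swap the two branches of the same flag test.  Illustrative
 * example: 'je +5' (74 05) adds 5 to RIP when ZF is set, counting from the
 * end of the instruction.
 */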
10294/** Opcode 0x70. */
10295FNIEMOP_DEF(iemOp_jo_Jb)
10296{
10297 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
10298 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10300 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10301
10302 IEM_MC_BEGIN(0, 0);
10303 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
10304 IEM_MC_REL_JMP_S8(i8Imm);
10305 } IEM_MC_ELSE() {
10306 IEM_MC_ADVANCE_RIP();
10307 } IEM_MC_ENDIF();
10308 IEM_MC_END();
10309 return VINF_SUCCESS;
10310}
10311
10312
10313/** Opcode 0x71. */
10314FNIEMOP_DEF(iemOp_jno_Jb)
10315{
10316 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
10317 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10319 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10320
10321 IEM_MC_BEGIN(0, 0);
10322 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
10323 IEM_MC_ADVANCE_RIP();
10324 } IEM_MC_ELSE() {
10325 IEM_MC_REL_JMP_S8(i8Imm);
10326 } IEM_MC_ENDIF();
10327 IEM_MC_END();
10328 return VINF_SUCCESS;
10329}
10330

10331/** Opcode 0x72. */
10332FNIEMOP_DEF(iemOp_jc_Jb)
10333{
10334 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
10335 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10337 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10338
10339 IEM_MC_BEGIN(0, 0);
10340 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10341 IEM_MC_REL_JMP_S8(i8Imm);
10342 } IEM_MC_ELSE() {
10343 IEM_MC_ADVANCE_RIP();
10344 } IEM_MC_ENDIF();
10345 IEM_MC_END();
10346 return VINF_SUCCESS;
10347}
10348
10349
10350/** Opcode 0x73. */
10351FNIEMOP_DEF(iemOp_jnc_Jb)
10352{
10353 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
10354 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10356 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10357
10358 IEM_MC_BEGIN(0, 0);
10359 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10360 IEM_MC_ADVANCE_RIP();
10361 } IEM_MC_ELSE() {
10362 IEM_MC_REL_JMP_S8(i8Imm);
10363 } IEM_MC_ENDIF();
10364 IEM_MC_END();
10365 return VINF_SUCCESS;
10366}
10367
10368
10369/** Opcode 0x74. */
10370FNIEMOP_DEF(iemOp_je_Jb)
10371{
10372 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
10373 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10375 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10376
10377 IEM_MC_BEGIN(0, 0);
10378 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10379 IEM_MC_REL_JMP_S8(i8Imm);
10380 } IEM_MC_ELSE() {
10381 IEM_MC_ADVANCE_RIP();
10382 } IEM_MC_ENDIF();
10383 IEM_MC_END();
10384 return VINF_SUCCESS;
10385}
10386
10387
10388/** Opcode 0x75. */
10389FNIEMOP_DEF(iemOp_jne_Jb)
10390{
10391 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
10392 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10394 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10395
10396 IEM_MC_BEGIN(0, 0);
10397 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10398 IEM_MC_ADVANCE_RIP();
10399 } IEM_MC_ELSE() {
10400 IEM_MC_REL_JMP_S8(i8Imm);
10401 } IEM_MC_ENDIF();
10402 IEM_MC_END();
10403 return VINF_SUCCESS;
10404}
10405
10406
10407/** Opcode 0x76. */
10408FNIEMOP_DEF(iemOp_jbe_Jb)
10409{
10410 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
10411 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10413 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10414
10415 IEM_MC_BEGIN(0, 0);
10416 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10417 IEM_MC_REL_JMP_S8(i8Imm);
10418 } IEM_MC_ELSE() {
10419 IEM_MC_ADVANCE_RIP();
10420 } IEM_MC_ENDIF();
10421 IEM_MC_END();
10422 return VINF_SUCCESS;
10423}
10424
10425
10426/** Opcode 0x77. */
10427FNIEMOP_DEF(iemOp_jnbe_Jb)
10428{
10429 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
10430 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10432 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10433
10434 IEM_MC_BEGIN(0, 0);
10435 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10436 IEM_MC_ADVANCE_RIP();
10437 } IEM_MC_ELSE() {
10438 IEM_MC_REL_JMP_S8(i8Imm);
10439 } IEM_MC_ENDIF();
10440 IEM_MC_END();
10441 return VINF_SUCCESS;
10442}
10443
10444
10445/** Opcode 0x78. */
10446FNIEMOP_DEF(iemOp_js_Jb)
10447{
10448 IEMOP_MNEMONIC(js_Jb, "js Jb");
10449 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10451 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10452
10453 IEM_MC_BEGIN(0, 0);
10454 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
10455 IEM_MC_REL_JMP_S8(i8Imm);
10456 } IEM_MC_ELSE() {
10457 IEM_MC_ADVANCE_RIP();
10458 } IEM_MC_ENDIF();
10459 IEM_MC_END();
10460 return VINF_SUCCESS;
10461}
10462
10463
10464/** Opcode 0x79. */
10465FNIEMOP_DEF(iemOp_jns_Jb)
10466{
10467 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
10468 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10470 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10471
10472 IEM_MC_BEGIN(0, 0);
10473 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
10474 IEM_MC_ADVANCE_RIP();
10475 } IEM_MC_ELSE() {
10476 IEM_MC_REL_JMP_S8(i8Imm);
10477 } IEM_MC_ENDIF();
10478 IEM_MC_END();
10479 return VINF_SUCCESS;
10480}
10481
10482
10483/** Opcode 0x7a. */
10484FNIEMOP_DEF(iemOp_jp_Jb)
10485{
10486 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
10487 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10489 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10490
10491 IEM_MC_BEGIN(0, 0);
10492 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10493 IEM_MC_REL_JMP_S8(i8Imm);
10494 } IEM_MC_ELSE() {
10495 IEM_MC_ADVANCE_RIP();
10496 } IEM_MC_ENDIF();
10497 IEM_MC_END();
10498 return VINF_SUCCESS;
10499}
10500
10501
10502/** Opcode 0x7b. */
10503FNIEMOP_DEF(iemOp_jnp_Jb)
10504{
10505 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
10506 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10508 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10509
10510 IEM_MC_BEGIN(0, 0);
10511 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10512 IEM_MC_ADVANCE_RIP();
10513 } IEM_MC_ELSE() {
10514 IEM_MC_REL_JMP_S8(i8Imm);
10515 } IEM_MC_ENDIF();
10516 IEM_MC_END();
10517 return VINF_SUCCESS;
10518}
10519
10520
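/*
 * The remaining Jcc forms test signed comparison results: jl/jnge is taken
 * when SF != OF, jle/jng additionally when ZF is set, and jnl/jnle invert
 * those conditions.  E.g. after 'cmp eax,ebx' with eax < ebx (signed), the
 * subtraction leaves SF != OF and 'jl' is taken.
 */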
10521/** Opcode 0x7c. */
10522FNIEMOP_DEF(iemOp_jl_Jb)
10523{
10524 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
10525 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10527 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10528
10529 IEM_MC_BEGIN(0, 0);
10530 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
10531 IEM_MC_REL_JMP_S8(i8Imm);
10532 } IEM_MC_ELSE() {
10533 IEM_MC_ADVANCE_RIP();
10534 } IEM_MC_ENDIF();
10535 IEM_MC_END();
10536 return VINF_SUCCESS;
10537}
10538
10539
10540/** Opcode 0x7d. */
10541FNIEMOP_DEF(iemOp_jnl_Jb)
10542{
10543 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
10544 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10546 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10547
10548 IEM_MC_BEGIN(0, 0);
10549 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
10550 IEM_MC_ADVANCE_RIP();
10551 } IEM_MC_ELSE() {
10552 IEM_MC_REL_JMP_S8(i8Imm);
10553 } IEM_MC_ENDIF();
10554 IEM_MC_END();
10555 return VINF_SUCCESS;
10556}
10557
10558
10559/** Opcode 0x7e. */
10560FNIEMOP_DEF(iemOp_jle_Jb)
10561{
10562 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
10563 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10565 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10566
10567 IEM_MC_BEGIN(0, 0);
10568 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
10569 IEM_MC_REL_JMP_S8(i8Imm);
10570 } IEM_MC_ELSE() {
10571 IEM_MC_ADVANCE_RIP();
10572 } IEM_MC_ENDIF();
10573 IEM_MC_END();
10574 return VINF_SUCCESS;
10575}
10576
10577
10578/** Opcode 0x7f. */
10579FNIEMOP_DEF(iemOp_jnle_Jb)
10580{
10581 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
10582 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10584 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10585
10586 IEM_MC_BEGIN(0, 0);
10587 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
10588 IEM_MC_ADVANCE_RIP();
10589 } IEM_MC_ELSE() {
10590 IEM_MC_REL_JMP_S8(i8Imm);
10591 } IEM_MC_ENDIF();
10592 IEM_MC_END();
10593 return VINF_SUCCESS;
10594}
10595
10596
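/*
 * Group 1 (opcodes 0x80 thru 0x83): the reg field of the ModR/M byte
 * selects the operation and is used as the index into the g_apIemImplGrp1
 * table of worker function bundles.  E.g. '80 /7 ib' decodes as 'cmp Eb,Ib'.
 * Since CMP never writes its destination, its bundle has no locked workers,
 * which is what the fAccess selection in the memory paths below keys off.
 */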
10597/** Opcode 0x80. */
10598FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
10599{
10600 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10601 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10602 {
10603 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
10604 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
10605 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
10606 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
10607 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
10608 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
10609 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
10610 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
10611 }
10612 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10613
10614 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10615 {
10616 /* register target */
10617 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10619 IEM_MC_BEGIN(3, 0);
10620 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10621 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
10622 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10623
10624 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10625 IEM_MC_REF_EFLAGS(pEFlags);
10626 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
10627
10628 IEM_MC_ADVANCE_RIP();
10629 IEM_MC_END();
10630 }
10631 else
10632 {
10633 /* memory target */
10634 uint32_t fAccess;
10635 if (pImpl->pfnLockedU8)
10636 fAccess = IEM_ACCESS_DATA_RW;
10637 else /* CMP */
10638 fAccess = IEM_ACCESS_DATA_R;
10639 IEM_MC_BEGIN(3, 2);
10640 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10641 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10642 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10643
10644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10645 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10646 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
10647 if (pImpl->pfnLockedU8)
10648 IEMOP_HLP_DONE_DECODING();
10649 else
10650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10651
10652 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10653 IEM_MC_FETCH_EFLAGS(EFlags);
10654 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10655 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
10656 else
10657 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
10658
10659 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
10660 IEM_MC_COMMIT_EFLAGS(EFlags);
10661 IEM_MC_ADVANCE_RIP();
10662 IEM_MC_END();
10663 }
10664 return VINF_SUCCESS;
10665}
10666
10667
10668/** Opcode 0x81. */
10669FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
10670{
10671 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10672 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10673 {
10674 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
10675 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
10676 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
10677 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
10678 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
10679 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
10680 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
10681 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
10682 }
10683 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10684
10685 switch (pVCpu->iem.s.enmEffOpSize)
10686 {
10687 case IEMMODE_16BIT:
10688 {
10689 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10690 {
10691 /* register target */
10692 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10694 IEM_MC_BEGIN(3, 0);
10695 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10696 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
10697 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10698
10699 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10700 IEM_MC_REF_EFLAGS(pEFlags);
10701 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10702
10703 IEM_MC_ADVANCE_RIP();
10704 IEM_MC_END();
10705 }
10706 else
10707 {
10708 /* memory target */
10709 uint32_t fAccess;
10710 if (pImpl->pfnLockedU16)
10711 fAccess = IEM_ACCESS_DATA_RW;
10712 else /* CMP, TEST */
10713 fAccess = IEM_ACCESS_DATA_R;
10714 IEM_MC_BEGIN(3, 2);
10715 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10716 IEM_MC_ARG(uint16_t, u16Src, 1);
10717 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10718 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10719
10720 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
10721 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10722 IEM_MC_ASSIGN(u16Src, u16Imm);
10723 if (pImpl->pfnLockedU16)
10724 IEMOP_HLP_DONE_DECODING();
10725 else
10726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10727 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10728 IEM_MC_FETCH_EFLAGS(EFlags);
10729 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10730 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10731 else
10732 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10733
10734 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10735 IEM_MC_COMMIT_EFLAGS(EFlags);
10736 IEM_MC_ADVANCE_RIP();
10737 IEM_MC_END();
10738 }
10739 break;
10740 }
10741
10742 case IEMMODE_32BIT:
10743 {
10744 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10745 {
10746 /* register target */
10747 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10749 IEM_MC_BEGIN(3, 0);
10750 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10751 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
10752 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10753
10754 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10755 IEM_MC_REF_EFLAGS(pEFlags);
10756 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10757 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10758
10759 IEM_MC_ADVANCE_RIP();
10760 IEM_MC_END();
10761 }
10762 else
10763 {
10764 /* memory target */
10765 uint32_t fAccess;
10766 if (pImpl->pfnLockedU32)
10767 fAccess = IEM_ACCESS_DATA_RW;
10768 else /* CMP, TEST */
10769 fAccess = IEM_ACCESS_DATA_R;
10770 IEM_MC_BEGIN(3, 2);
10771 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10772 IEM_MC_ARG(uint32_t, u32Src, 1);
10773 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10775
10776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10777 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10778 IEM_MC_ASSIGN(u32Src, u32Imm);
10779 if (pImpl->pfnLockedU32)
10780 IEMOP_HLP_DONE_DECODING();
10781 else
10782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10783 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10784 IEM_MC_FETCH_EFLAGS(EFlags);
10785 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10786 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10787 else
10788 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10789
10790 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10791 IEM_MC_COMMIT_EFLAGS(EFlags);
10792 IEM_MC_ADVANCE_RIP();
10793 IEM_MC_END();
10794 }
10795 break;
10796 }
10797
10798 case IEMMODE_64BIT:
10799 {
10800 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10801 {
10802 /* register target */
10803 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10805 IEM_MC_BEGIN(3, 0);
10806 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10807 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
10808 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10809
10810 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10811 IEM_MC_REF_EFLAGS(pEFlags);
10812 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10813
10814 IEM_MC_ADVANCE_RIP();
10815 IEM_MC_END();
10816 }
10817 else
10818 {
10819 /* memory target */
10820 uint32_t fAccess;
10821 if (pImpl->pfnLockedU64)
10822 fAccess = IEM_ACCESS_DATA_RW;
10823 else /* CMP */
10824 fAccess = IEM_ACCESS_DATA_R;
10825 IEM_MC_BEGIN(3, 2);
10826 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10827 IEM_MC_ARG(uint64_t, u64Src, 1);
10828 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10829 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10830
10831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10832 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10833 if (pImpl->pfnLockedU64)
10834 IEMOP_HLP_DONE_DECODING();
10835 else
10836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10837 IEM_MC_ASSIGN(u64Src, u64Imm);
10838 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10839 IEM_MC_FETCH_EFLAGS(EFlags);
10840 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10841 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10842 else
10843 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10844
10845 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10846 IEM_MC_COMMIT_EFLAGS(EFlags);
10847 IEM_MC_ADVANCE_RIP();
10848 IEM_MC_END();
10849 }
10850 break;
10851 }
10852 }
10853 return VINF_SUCCESS;
10854}
10855
10856
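/*
 * Opcode 0x82 is an undocumented alias of 0x80 on older CPUs; in 64-bit
 * mode the byte is reserved and must raise #UD, which the
 * IEMOP_HLP_NO_64BIT check below takes care of.
 */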
10857/** Opcode 0x82. */
10858FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
10859{
10860 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
10861 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
10862}
10863
10864
10865/** Opcode 0x83. */
10866FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
10867{
10868 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10869 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10870 {
10871 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
10872 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
10873 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
10874 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
10875 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
10876 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
10877 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
10878 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
10879 }
10880    /* Note! The OR, AND, and XOR instructions seem to be present on CPUs prior
10881       to the 386, even though they are absent from the Intel reference
10882       manuals and some 3rd party opcode listings. */
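    /* Note: the Ib immediate is sign-extended to the effective operand size
       before the operation, hence the (int8_t) casts below.  E.g. '83 C0 FF'
       is 'add eax, 0xffffffff', i.e. it subtracts one. */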
10883 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10884
10885 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10886 {
10887 /*
10888 * Register target
10889 */
10890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10891 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10892 switch (pVCpu->iem.s.enmEffOpSize)
10893 {
10894 case IEMMODE_16BIT:
10895 {
10896 IEM_MC_BEGIN(3, 0);
10897 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10898 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
10899 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10900
10901 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10902 IEM_MC_REF_EFLAGS(pEFlags);
10903 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10904
10905 IEM_MC_ADVANCE_RIP();
10906 IEM_MC_END();
10907 break;
10908 }
10909
10910 case IEMMODE_32BIT:
10911 {
10912 IEM_MC_BEGIN(3, 0);
10913 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10914 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
10915 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10916
10917 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10918 IEM_MC_REF_EFLAGS(pEFlags);
10919 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10920 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10921
10922 IEM_MC_ADVANCE_RIP();
10923 IEM_MC_END();
10924 break;
10925 }
10926
10927 case IEMMODE_64BIT:
10928 {
10929 IEM_MC_BEGIN(3, 0);
10930 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10931 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
10932 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10933
10934 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10935 IEM_MC_REF_EFLAGS(pEFlags);
10936 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10937
10938 IEM_MC_ADVANCE_RIP();
10939 IEM_MC_END();
10940 break;
10941 }
10942 }
10943 }
10944 else
10945 {
10946 /*
10947 * Memory target.
10948 */
10949 uint32_t fAccess;
10950 if (pImpl->pfnLockedU16)
10951 fAccess = IEM_ACCESS_DATA_RW;
10952 else /* CMP */
10953 fAccess = IEM_ACCESS_DATA_R;
10954
10955 switch (pVCpu->iem.s.enmEffOpSize)
10956 {
10957 case IEMMODE_16BIT:
10958 {
10959 IEM_MC_BEGIN(3, 2);
10960 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10961 IEM_MC_ARG(uint16_t, u16Src, 1);
10962 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10963 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10964
10965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10966 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10967 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
10968 if (pImpl->pfnLockedU16)
10969 IEMOP_HLP_DONE_DECODING();
10970 else
10971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10972 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10973 IEM_MC_FETCH_EFLAGS(EFlags);
10974 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10975 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10976 else
10977 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10978
10979 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10980 IEM_MC_COMMIT_EFLAGS(EFlags);
10981 IEM_MC_ADVANCE_RIP();
10982 IEM_MC_END();
10983 break;
10984 }
10985
10986 case IEMMODE_32BIT:
10987 {
10988 IEM_MC_BEGIN(3, 2);
10989 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10990 IEM_MC_ARG(uint32_t, u32Src, 1);
10991 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10992 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10993
10994 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10995 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10996 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
10997 if (pImpl->pfnLockedU32)
10998 IEMOP_HLP_DONE_DECODING();
10999 else
11000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11001 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11002 IEM_MC_FETCH_EFLAGS(EFlags);
11003 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11004 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11005 else
11006 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
11007
11008 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
11009 IEM_MC_COMMIT_EFLAGS(EFlags);
11010 IEM_MC_ADVANCE_RIP();
11011 IEM_MC_END();
11012 break;
11013 }
11014
11015 case IEMMODE_64BIT:
11016 {
11017 IEM_MC_BEGIN(3, 2);
11018 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11019 IEM_MC_ARG(uint64_t, u64Src, 1);
11020 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11022
11023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
11024 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11025 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
11026 if (pImpl->pfnLockedU64)
11027 IEMOP_HLP_DONE_DECODING();
11028 else
11029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11030 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11031 IEM_MC_FETCH_EFLAGS(EFlags);
11032 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11033 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11034 else
11035 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
11036
11037 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
11038 IEM_MC_COMMIT_EFLAGS(EFlags);
11039 IEM_MC_ADVANCE_RIP();
11040 IEM_MC_END();
11041 break;
11042 }
11043 }
11044 }
11045 return VINF_SUCCESS;
11046}
11047
11048
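/*
 * TEST is an AND that only updates the flags and discards the result.  It
 * therefore shares the generic binary-operator helpers, and since its
 * implementation bundle has no locked workers those helpers map a memory
 * operand read-only (same fAccess selection as for CMP above).  AF is
 * architecturally undefined, which the verification hints below record.
 */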
11049/** Opcode 0x84. */
11050FNIEMOP_DEF(iemOp_test_Eb_Gb)
11051{
11052 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
11053 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11054 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
11055}
11056
11057
11058/** Opcode 0x85. */
11059FNIEMOP_DEF(iemOp_test_Ev_Gv)
11060{
11061 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
11062 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11063 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
11064}
11065
11066
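/*
 * XCHG with a memory operand is implicitly atomic on real hardware, no LOCK
 * prefix needed, which is presumably why the memory paths below map the
 * operand read-write and perform the swap in a single assembly worker.  The
 * register-only forms just swap the values via two temporaries.
 */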
11067/** Opcode 0x86. */
11068FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
11069{
11070 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11071 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
11072
11073 /*
11074 * If rm is denoting a register, no more instruction bytes.
11075 */
11076 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11077 {
11078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11079
11080 IEM_MC_BEGIN(0, 2);
11081 IEM_MC_LOCAL(uint8_t, uTmp1);
11082 IEM_MC_LOCAL(uint8_t, uTmp2);
11083
11084 IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11085 IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11086 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11087 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11088
11089 IEM_MC_ADVANCE_RIP();
11090 IEM_MC_END();
11091 }
11092 else
11093 {
11094 /*
11095 * We're accessing memory.
11096 */
11097/** @todo the register must be committed separately! */
11098 IEM_MC_BEGIN(2, 2);
11099 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
11100 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11102
11103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11104 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11105 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11106 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
11107 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
11108
11109 IEM_MC_ADVANCE_RIP();
11110 IEM_MC_END();
11111 }
11112 return VINF_SUCCESS;
11113}
11114
11115
11116/** Opcode 0x87. */
11117FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
11118{
11119 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
11120 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11121
11122 /*
11123 * If rm is denoting a register, no more instruction bytes.
11124 */
11125 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11126 {
11127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11128
11129 switch (pVCpu->iem.s.enmEffOpSize)
11130 {
11131 case IEMMODE_16BIT:
11132 IEM_MC_BEGIN(0, 2);
11133 IEM_MC_LOCAL(uint16_t, uTmp1);
11134 IEM_MC_LOCAL(uint16_t, uTmp2);
11135
11136 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11137 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11138 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11139 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11140
11141 IEM_MC_ADVANCE_RIP();
11142 IEM_MC_END();
11143 return VINF_SUCCESS;
11144
11145 case IEMMODE_32BIT:
11146 IEM_MC_BEGIN(0, 2);
11147 IEM_MC_LOCAL(uint32_t, uTmp1);
11148 IEM_MC_LOCAL(uint32_t, uTmp2);
11149
11150 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11151 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11152 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11153 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11154
11155 IEM_MC_ADVANCE_RIP();
11156 IEM_MC_END();
11157 return VINF_SUCCESS;
11158
11159 case IEMMODE_64BIT:
11160 IEM_MC_BEGIN(0, 2);
11161 IEM_MC_LOCAL(uint64_t, uTmp1);
11162 IEM_MC_LOCAL(uint64_t, uTmp2);
11163
11164 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11165 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11166 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11167 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11168
11169 IEM_MC_ADVANCE_RIP();
11170 IEM_MC_END();
11171 return VINF_SUCCESS;
11172
11173 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11174 }
11175 }
11176 else
11177 {
11178 /*
11179 * We're accessing memory.
11180 */
11181 switch (pVCpu->iem.s.enmEffOpSize)
11182 {
11183/** @todo the register must be committed separately! */
11184 case IEMMODE_16BIT:
11185 IEM_MC_BEGIN(2, 2);
11186 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
11187 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11188 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11189
11190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11191 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11192 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11193 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
11194 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
11195
11196 IEM_MC_ADVANCE_RIP();
11197 IEM_MC_END();
11198 return VINF_SUCCESS;
11199
11200 case IEMMODE_32BIT:
11201 IEM_MC_BEGIN(2, 2);
11202 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
11203 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11205
11206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11207 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11208 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11209 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
11210 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
11211
11212 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
11213 IEM_MC_ADVANCE_RIP();
11214 IEM_MC_END();
11215 return VINF_SUCCESS;
11216
11217 case IEMMODE_64BIT:
11218 IEM_MC_BEGIN(2, 2);
11219 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
11220 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11222
11223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11224 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11225 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11226 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
11227 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
11228
11229 IEM_MC_ADVANCE_RIP();
11230 IEM_MC_END();
11231 return VINF_SUCCESS;
11232
11233 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11234 }
11235 }
11236}
11237
11238
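/*
 * Plain MOV forms (0x88 thru 0x8b) follow.  Note that the 32-bit register
 * stores implicitly zero the upper half of the 64-bit register in long
 * mode, as is the case for all 32-bit GPR writes.
 */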
11239/** Opcode 0x88. */
11240FNIEMOP_DEF(iemOp_mov_Eb_Gb)
11241{
11242 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
11243
11244 uint8_t bRm;
11245 IEM_OPCODE_GET_NEXT_U8(&bRm);
11246
11247 /*
11248 * If rm is denoting a register, no more instruction bytes.
11249 */
11250 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11251 {
11252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11253 IEM_MC_BEGIN(0, 1);
11254 IEM_MC_LOCAL(uint8_t, u8Value);
11255 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11256 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
11257 IEM_MC_ADVANCE_RIP();
11258 IEM_MC_END();
11259 }
11260 else
11261 {
11262 /*
11263 * We're writing a register to memory.
11264 */
11265 IEM_MC_BEGIN(0, 2);
11266 IEM_MC_LOCAL(uint8_t, u8Value);
11267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11268 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11270 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11271 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
11272 IEM_MC_ADVANCE_RIP();
11273 IEM_MC_END();
11274 }
11275 return VINF_SUCCESS;
11277}
11278
11279
11280/** Opcode 0x89. */
11281FNIEMOP_DEF(iemOp_mov_Ev_Gv)
11282{
11283 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
11284
11285 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11286
11287 /*
11288 * If rm is denoting a register, no more instruction bytes.
11289 */
11290 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11291 {
11292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11293 switch (pVCpu->iem.s.enmEffOpSize)
11294 {
11295 case IEMMODE_16BIT:
11296 IEM_MC_BEGIN(0, 1);
11297 IEM_MC_LOCAL(uint16_t, u16Value);
11298 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11299 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
11300 IEM_MC_ADVANCE_RIP();
11301 IEM_MC_END();
11302 break;
11303
11304 case IEMMODE_32BIT:
11305 IEM_MC_BEGIN(0, 1);
11306 IEM_MC_LOCAL(uint32_t, u32Value);
11307 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11308 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
11309 IEM_MC_ADVANCE_RIP();
11310 IEM_MC_END();
11311 break;
11312
11313 case IEMMODE_64BIT:
11314 IEM_MC_BEGIN(0, 1);
11315 IEM_MC_LOCAL(uint64_t, u64Value);
11316 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11317 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
11318 IEM_MC_ADVANCE_RIP();
11319 IEM_MC_END();
11320 break;
11321 }
11322 }
11323 else
11324 {
11325 /*
11326 * We're writing a register to memory.
11327 */
11328 switch (pVCpu->iem.s.enmEffOpSize)
11329 {
11330 case IEMMODE_16BIT:
11331 IEM_MC_BEGIN(0, 2);
11332 IEM_MC_LOCAL(uint16_t, u16Value);
11333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11336 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11337 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
11338 IEM_MC_ADVANCE_RIP();
11339 IEM_MC_END();
11340 break;
11341
11342 case IEMMODE_32BIT:
11343 IEM_MC_BEGIN(0, 2);
11344 IEM_MC_LOCAL(uint32_t, u32Value);
11345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11348 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11349 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11350 IEM_MC_ADVANCE_RIP();
11351 IEM_MC_END();
11352 break;
11353
11354 case IEMMODE_64BIT:
11355 IEM_MC_BEGIN(0, 2);
11356 IEM_MC_LOCAL(uint64_t, u64Value);
11357 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11360 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11361 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11362 IEM_MC_ADVANCE_RIP();
11363 IEM_MC_END();
11364 break;
11365 }
11366 }
11367 return VINF_SUCCESS;
11368}
11369
11370
11371/** Opcode 0x8a. */
11372FNIEMOP_DEF(iemOp_mov_Gb_Eb)
11373{
11374 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
11375
11376 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11377
11378 /*
11379 * If rm is denoting a register, no more instruction bytes.
11380 */
11381 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11382 {
11383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11384 IEM_MC_BEGIN(0, 1);
11385 IEM_MC_LOCAL(uint8_t, u8Value);
11386 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11387 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
11388 IEM_MC_ADVANCE_RIP();
11389 IEM_MC_END();
11390 }
11391 else
11392 {
11393 /*
11394 * We're loading a register from memory.
11395 */
11396 IEM_MC_BEGIN(0, 2);
11397 IEM_MC_LOCAL(uint8_t, u8Value);
11398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11401 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11402 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
11403 IEM_MC_ADVANCE_RIP();
11404 IEM_MC_END();
11405 }
11406 return VINF_SUCCESS;
11407}
11408
11409
11410/** Opcode 0x8b. */
11411FNIEMOP_DEF(iemOp_mov_Gv_Ev)
11412{
11413 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
11414
11415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11416
11417 /*
11418 * If rm is denoting a register, no more instruction bytes.
11419 */
11420 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11421 {
11422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11423 switch (pVCpu->iem.s.enmEffOpSize)
11424 {
11425 case IEMMODE_16BIT:
11426 IEM_MC_BEGIN(0, 1);
11427 IEM_MC_LOCAL(uint16_t, u16Value);
11428 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11429 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
11430 IEM_MC_ADVANCE_RIP();
11431 IEM_MC_END();
11432 break;
11433
11434 case IEMMODE_32BIT:
11435 IEM_MC_BEGIN(0, 1);
11436 IEM_MC_LOCAL(uint32_t, u32Value);
11437 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11438 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
11439 IEM_MC_ADVANCE_RIP();
11440 IEM_MC_END();
11441 break;
11442
11443 case IEMMODE_64BIT:
11444 IEM_MC_BEGIN(0, 1);
11445 IEM_MC_LOCAL(uint64_t, u64Value);
11446 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11447 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
11448 IEM_MC_ADVANCE_RIP();
11449 IEM_MC_END();
11450 break;
11451 }
11452 }
11453 else
11454 {
11455 /*
11456 * We're loading a register from memory.
11457 */
11458 switch (pVCpu->iem.s.enmEffOpSize)
11459 {
11460 case IEMMODE_16BIT:
11461 IEM_MC_BEGIN(0, 2);
11462 IEM_MC_LOCAL(uint16_t, u16Value);
11463 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11464 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11466 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11467 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
11468 IEM_MC_ADVANCE_RIP();
11469 IEM_MC_END();
11470 break;
11471
11472 case IEMMODE_32BIT:
11473 IEM_MC_BEGIN(0, 2);
11474 IEM_MC_LOCAL(uint32_t, u32Value);
11475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11478 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11479 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
11480 IEM_MC_ADVANCE_RIP();
11481 IEM_MC_END();
11482 break;
11483
11484 case IEMMODE_64BIT:
11485 IEM_MC_BEGIN(0, 2);
11486 IEM_MC_LOCAL(uint64_t, u64Value);
11487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11490 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11491 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
11492 IEM_MC_ADVANCE_RIP();
11493 IEM_MC_END();
11494 break;
11495 }
11496 }
11497 return VINF_SUCCESS;
11498}
11499
11500
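/*
 * Opcode 0x63 is dispatched here, after 0x8b, because outside 64-bit mode
 * it decodes as ARPL while in long mode it is MOVSXD; the latter reuses the
 * plain 'mov Gv,Ev' handler above when there is no REX.W prefix.
 */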
11501/** Opcode 0x63. */
11502FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
11503{
11504 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
11505 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
11506 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11507 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
11508 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
11509}
11510
11511
11512/** Opcode 0x8c. */
11513FNIEMOP_DEF(iemOp_mov_Ev_Sw)
11514{
11515 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
11516
11517 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11518
11519 /*
11520 * Check that the destination register exists. The REX.R prefix is ignored.
11521 */
11522 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
11523 if ( iSegReg > X86_SREG_GS)
11524 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
11525
11526 /*
11527 * If rm is denoting a register, no more instruction bytes.
11528 * In that case, the operand size is respected and the upper bits are
11529     * cleared (starting with some Pentium models).
11530 */
11531 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11532 {
11533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11534 switch (pVCpu->iem.s.enmEffOpSize)
11535 {
11536 case IEMMODE_16BIT:
11537 IEM_MC_BEGIN(0, 1);
11538 IEM_MC_LOCAL(uint16_t, u16Value);
11539 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
11540 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
11541 IEM_MC_ADVANCE_RIP();
11542 IEM_MC_END();
11543 break;
11544
11545 case IEMMODE_32BIT:
11546 IEM_MC_BEGIN(0, 1);
11547 IEM_MC_LOCAL(uint32_t, u32Value);
11548 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
11549 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
11550 IEM_MC_ADVANCE_RIP();
11551 IEM_MC_END();
11552 break;
11553
11554 case IEMMODE_64BIT:
11555 IEM_MC_BEGIN(0, 1);
11556 IEM_MC_LOCAL(uint64_t, u64Value);
11557 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
11558 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
11559 IEM_MC_ADVANCE_RIP();
11560 IEM_MC_END();
11561 break;
11562 }
11563 }
11564 else
11565 {
11566 /*
11567 * We're saving the register to memory. The access is word sized
11568 * regardless of operand size prefixes.
11569 */
11570#if 0 /* not necessary */
11571 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
11572#endif
11573 IEM_MC_BEGIN(0, 2);
11574 IEM_MC_LOCAL(uint16_t, u16Value);
11575 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11576 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11578 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
11579 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
11580 IEM_MC_ADVANCE_RIP();
11581 IEM_MC_END();
11582 }
11583 return VINF_SUCCESS;
11584}
11585
11586
11587
11588
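/*
 * LEA stores the result of the effective address calculation without ever
 * touching memory, so the register form is invalid (#UD).  E.g.
 * 'lea eax,[ebx+ecx*2]' loads EBX + 2*ECX into EAX.  With a 16-bit operand
 * size the address is truncated, which is what the IEM_MC_ASSIGN_TO_SMALLER
 * casts below do.
 */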
11589/** Opcode 0x8d. */
11590FNIEMOP_DEF(iemOp_lea_Gv_M)
11591{
11592 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
11593 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11594 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11595 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
11596
11597 switch (pVCpu->iem.s.enmEffOpSize)
11598 {
11599 case IEMMODE_16BIT:
11600 IEM_MC_BEGIN(0, 2);
11601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11602 IEM_MC_LOCAL(uint16_t, u16Cast);
11603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11605 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
11606 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
11607 IEM_MC_ADVANCE_RIP();
11608 IEM_MC_END();
11609 return VINF_SUCCESS;
11610
11611 case IEMMODE_32BIT:
11612 IEM_MC_BEGIN(0, 2);
11613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11614 IEM_MC_LOCAL(uint32_t, u32Cast);
11615 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11617 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
11618 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
11619 IEM_MC_ADVANCE_RIP();
11620 IEM_MC_END();
11621 return VINF_SUCCESS;
11622
11623 case IEMMODE_64BIT:
11624 IEM_MC_BEGIN(0, 1);
11625 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11626 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11628 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
11629 IEM_MC_ADVANCE_RIP();
11630 IEM_MC_END();
11631 return VINF_SUCCESS;
11632 }
11633 AssertFailedReturn(VERR_IEM_IPE_7);
11634}
11635
11636
11637/** Opcode 0x8e. */
11638FNIEMOP_DEF(iemOp_mov_Sw_Ev)
11639{
11640 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
11641
11642 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11643
11644 /*
11645 * The practical operand size is 16-bit.
11646 */
11647#if 0 /* not necessary */
11648 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
11649#endif
11650
11651 /*
11652 * Check that the destination register exists and can be used with this
11653 * instruction. The REX.R prefix is ignored.
11654 */
11655 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
11656 if ( iSegReg == X86_SREG_CS
11657 || iSegReg > X86_SREG_GS)
11658 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
11659
11660 /*
11661 * If rm is denoting a register, no more instruction bytes.
11662 */
11663 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11664 {
11665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11666 IEM_MC_BEGIN(2, 0);
11667 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
11668 IEM_MC_ARG(uint16_t, u16Value, 1);
11669 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11670 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
11671 IEM_MC_END();
11672 }
11673 else
11674 {
11675 /*
11676 * We're loading the register from memory. The access is word sized
11677 * regardless of operand size prefixes.
11678 */
11679 IEM_MC_BEGIN(2, 1);
11680 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
11681 IEM_MC_ARG(uint16_t, u16Value, 1);
11682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11683 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11685 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11686 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
11687 IEM_MC_END();
11688 }
11689 return VINF_SUCCESS;
11690}
11691
11692
11693/** Opcode 0x8f /0. */
11694FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
11695{
11696 /* This bugger is rather annoying as it requires rSP to be updated before
11697 doing the effective address calculations. Will eventually require a
11698 split between the R/M+SIB decoding and the effective address
11699       calculation - something that any attempt at reusing this code in a
11700       recompiler will require anyway. It may also be good to have if we
11701       need to delay the #UD exception caused by invalid lock prefixes.
11702
11703 For now, we'll do a mostly safe interpreter-only implementation here. */
11704    /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
11705     *        now until tests show it's checked. */
11706 IEMOP_MNEMONIC(pop_Ev, "pop Ev");
11707
11708 /* Register access is relatively easy and can share code. */
11709 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11710 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11711
11712 /*
11713 * Memory target.
11714 *
11715 * Intel says that RSP is incremented before it's used in any effective
11716     * address calculations. This means some serious extra annoyance here since
11717 * we decode and calculate the effective address in one step and like to
11718 * delay committing registers till everything is done.
11719 *
11720 * So, we'll decode and calculate the effective address twice. This will
11721 * require some recoding if turned into a recompiler.
11722 */
11723 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
11724
11725#ifndef TST_IEM_CHECK_MC
11726 /* Calc effective address with modified ESP. */
11727/** @todo testcase */
11728 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
11729 RTGCPTR GCPtrEff;
11730 VBOXSTRICTRC rcStrict;
11731 switch (pVCpu->iem.s.enmEffOpSize)
11732 {
11733 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
11734 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
11735 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
11736 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11737 }
11738 if (rcStrict != VINF_SUCCESS)
11739 return rcStrict;
11740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11741
11742 /* Perform the operation - this should be CImpl. */
11743 RTUINT64U TmpRsp;
11744 TmpRsp.u = pCtx->rsp;
11745 switch (pVCpu->iem.s.enmEffOpSize)
11746 {
11747 case IEMMODE_16BIT:
11748 {
11749 uint16_t u16Value;
11750 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
11751 if (rcStrict == VINF_SUCCESS)
11752 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
11753 break;
11754 }
11755
11756 case IEMMODE_32BIT:
11757 {
11758 uint32_t u32Value;
11759 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
11760 if (rcStrict == VINF_SUCCESS)
11761 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
11762 break;
11763 }
11764
11765 case IEMMODE_64BIT:
11766 {
11767 uint64_t u64Value;
11768 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
11769 if (rcStrict == VINF_SUCCESS)
11770 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
11771 break;
11772 }
11773
11774 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11775 }
11776 if (rcStrict == VINF_SUCCESS)
11777 {
11778 pCtx->rsp = TmpRsp.u;
11779 iemRegUpdateRipAndClearRF(pVCpu);
11780 }
11781 return rcStrict;
11782
11783#else
11784 return VERR_IEM_IPE_2;
11785#endif
11786}
11787
11788
11789/** Opcode 0x8f. */
11790FNIEMOP_DEF(iemOp_Grp1A)
11791{
11792 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11793 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
11794 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
11795
11796 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
11797 /** @todo XOP decoding. */
11798 IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
11799 return IEMOP_RAISE_INVALID_OPCODE();
11800}
11801
11802
11803/**
11804 * Common 'xchg reg,rAX' helper.
11805 */
11806FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
11807{
11808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11809
11810 iReg |= pVCpu->iem.s.uRexB;
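    /* The REX.B prefix extends the register index, so opcodes 0x91 thru
       0x97 can address r9 thru r15 as well; e.g. REX.B + 0x91 is
       'xchg r9,rax'. */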
11811 switch (pVCpu->iem.s.enmEffOpSize)
11812 {
11813 case IEMMODE_16BIT:
11814 IEM_MC_BEGIN(0, 2);
11815 IEM_MC_LOCAL(uint16_t, u16Tmp1);
11816 IEM_MC_LOCAL(uint16_t, u16Tmp2);
11817 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
11818 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
11819 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
11820 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
11821 IEM_MC_ADVANCE_RIP();
11822 IEM_MC_END();
11823 return VINF_SUCCESS;
11824
11825 case IEMMODE_32BIT:
11826 IEM_MC_BEGIN(0, 2);
11827 IEM_MC_LOCAL(uint32_t, u32Tmp1);
11828 IEM_MC_LOCAL(uint32_t, u32Tmp2);
11829 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
11830 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
11831 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
11832 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
11833 IEM_MC_ADVANCE_RIP();
11834 IEM_MC_END();
11835 return VINF_SUCCESS;
11836
11837 case IEMMODE_64BIT:
11838 IEM_MC_BEGIN(0, 2);
11839 IEM_MC_LOCAL(uint64_t, u64Tmp1);
11840 IEM_MC_LOCAL(uint64_t, u64Tmp2);
11841 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
11842 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
11843 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
11844 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
11845 IEM_MC_ADVANCE_RIP();
11846 IEM_MC_END();
11847 return VINF_SUCCESS;
11848
11849 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11850 }
11851}
11852
11853
11854/** Opcode 0x90. */
11855FNIEMOP_DEF(iemOp_nop)
11856{
11857 /* R8/R8D and RAX/EAX can be exchanged. */
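    /* Plain 0x90 would be 'xchg rAX,rAX', which is architecturally a NOP
       (or PAUSE with an F3 prefix), but a REX.B prefix turns the second
       operand into R8, making it a real exchange that must be performed. */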
11858 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
11859 {
11860 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
11861 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
11862 }
11863
11864    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
11865 IEMOP_MNEMONIC(pause, "pause");
11866 else
11867 IEMOP_MNEMONIC(nop, "nop");
11868 IEM_MC_BEGIN(0, 0);
11869 IEM_MC_ADVANCE_RIP();
11870 IEM_MC_END();
11871 return VINF_SUCCESS;
11872}
11873
11874
11875/** Opcode 0x91. */
11876FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
11877{
11878 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
11879 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
11880}
11881
11882
11883/** Opcode 0x92. */
11884FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
11885{
11886 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
11887 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
11888}
11889
11890
11891/** Opcode 0x93. */
11892FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
11893{
11894 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
11895 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
11896}
11897
11898
11899/** Opcode 0x94. */
11900FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
11901{
11902 IEMOP_MNEMONIC(xchg_rSP_rAX, "xchg rSP,rAX");
11903 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
11904}
11905
11906
11907/** Opcode 0x95. */
11908FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
11909{
11910 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
11911 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
11912}
11913
11914
11915/** Opcode 0x96. */
11916FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
11917{
11918 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
11919 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
11920}
11921
11922
11923/** Opcode 0x97. */
11924FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
11925{
11926 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
11927 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
11928}
11929
11930
11931/** Opcode 0x98. */
11932FNIEMOP_DEF(iemOp_cbw)
11933{
11934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11935 switch (pVCpu->iem.s.enmEffOpSize)
11936 {
11937 case IEMMODE_16BIT:
11938 IEMOP_MNEMONIC(cbw, "cbw");
11939 IEM_MC_BEGIN(0, 1);
11940 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
11941 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
11942 } IEM_MC_ELSE() {
11943 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
11944 } IEM_MC_ENDIF();
11945 IEM_MC_ADVANCE_RIP();
11946 IEM_MC_END();
11947 return VINF_SUCCESS;
11948
11949 case IEMMODE_32BIT:
11950 IEMOP_MNEMONIC(cwde, "cwde");
11951 IEM_MC_BEGIN(0, 1);
11952 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11953 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
11954 } IEM_MC_ELSE() {
11955 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
11956 } IEM_MC_ENDIF();
11957 IEM_MC_ADVANCE_RIP();
11958 IEM_MC_END();
11959 return VINF_SUCCESS;
11960
11961 case IEMMODE_64BIT:
11962 IEMOP_MNEMONIC(cdqe, "cdqe");
11963 IEM_MC_BEGIN(0, 1);
11964 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
11965 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
11966 } IEM_MC_ELSE() {
11967 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
11968 } IEM_MC_ENDIF();
11969 IEM_MC_ADVANCE_RIP();
11970 IEM_MC_END();
11971 return VINF_SUCCESS;
11972
11973 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11974 }
11975}
11976
11977
11978/** Opcode 0x99. */
11979FNIEMOP_DEF(iemOp_cwd)
11980{
11981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11982 switch (pVCpu->iem.s.enmEffOpSize)
11983 {
11984 case IEMMODE_16BIT:
11985 IEMOP_MNEMONIC(cwd, "cwd");
11986 IEM_MC_BEGIN(0, 1);
11987 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11988 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
11989 } IEM_MC_ELSE() {
11990 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
11991 } IEM_MC_ENDIF();
11992 IEM_MC_ADVANCE_RIP();
11993 IEM_MC_END();
11994 return VINF_SUCCESS;
11995
11996 case IEMMODE_32BIT:
11997 IEMOP_MNEMONIC(cdq, "cdq");
11998 IEM_MC_BEGIN(0, 1);
11999 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
12000 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
12001 } IEM_MC_ELSE() {
12002 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
12003 } IEM_MC_ENDIF();
12004 IEM_MC_ADVANCE_RIP();
12005 IEM_MC_END();
12006 return VINF_SUCCESS;
12007
12008 case IEMMODE_64BIT:
12009 IEMOP_MNEMONIC(cqo, "cqo");
12010 IEM_MC_BEGIN(0, 1);
12011 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
12012 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
12013 } IEM_MC_ELSE() {
12014 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
12015 } IEM_MC_ENDIF();
12016 IEM_MC_ADVANCE_RIP();
12017 IEM_MC_END();
12018 return VINF_SUCCESS;
12019
12020 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12021 }
12022}
12023
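/*
 * Documentation-only sketch (hypothetical helpers, never compiled): the net
 * effect of the two opcodes above for the 32-bit operand size, mirroring the
 * bit-test + OR/AND masking the IEM_MC blocks use instead of a signed cast.
 */
#if 0
static uint64_t iemSketchCwde(uint64_t uRax)    /* 0x98: eax = sx(ax) */
{
    uint32_t uEax = (uint32_t)uRax;
    if (uEax & UINT32_C(0x8000))                /* sign bit of AX set? */
        return uEax | UINT32_C(0xffff0000);     /* propagate ones upwards */
    return uEax & UINT32_C(0x0000ffff);         /* otherwise clear the top */
}

static uint64_t iemSketchCdq(uint64_t uRax)     /* 0x99: new EDX value */
{
    return (uRax & UINT32_C(0x80000000)) ? UINT32_MAX : 0;
}
#endif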
12024
12025/** Opcode 0x9a. */
12026FNIEMOP_DEF(iemOp_call_Ap)
12027{
12028 IEMOP_MNEMONIC(call_Ap, "call Ap");
12029 IEMOP_HLP_NO_64BIT();
12030
12031 /* Decode the far pointer address and pass it on to the far call C implementation. */
12032 uint32_t offSeg;
12033 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
12034 IEM_OPCODE_GET_NEXT_U32(&offSeg);
12035 else
12036 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
12037 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
12038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12039 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
12040}
12041
12042
12043/** Opcode 0x9b. (aka fwait) */
12044FNIEMOP_DEF(iemOp_wait)
12045{
12046 IEMOP_MNEMONIC(wait, "wait");
12047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12048
12049 IEM_MC_BEGIN(0, 0);
12050 IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
12051 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12052 IEM_MC_ADVANCE_RIP();
12053 IEM_MC_END();
12054 return VINF_SUCCESS;
12055}
12056
12057
12058/** Opcode 0x9c. */
12059FNIEMOP_DEF(iemOp_pushf_Fv)
12060{
12061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12062 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12063 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
12064}
12065
12066
12067/** Opcode 0x9d. */
12068FNIEMOP_DEF(iemOp_popf_Fv)
12069{
12070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12071 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12072 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
12073}
12074
12075
12076/** Opcode 0x9e. */
12077FNIEMOP_DEF(iemOp_sahf)
12078{
12079 IEMOP_MNEMONIC(sahf, "sahf");
12080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12081 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
12082 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
12083 return IEMOP_RAISE_INVALID_OPCODE();
12084 IEM_MC_BEGIN(0, 2);
12085 IEM_MC_LOCAL(uint32_t, u32Flags);
12086 IEM_MC_LOCAL(uint32_t, EFlags);
12087 IEM_MC_FETCH_EFLAGS(EFlags);
12088 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
12089 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
12090 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
12091 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
12092 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
12093 IEM_MC_COMMIT_EFLAGS(EFlags);
12094 IEM_MC_ADVANCE_RIP();
12095 IEM_MC_END();
12096 return VINF_SUCCESS;
12097}
12098
12099
12100/** Opcode 0x9f. */
12101FNIEMOP_DEF(iemOp_lahf)
12102{
12103 IEMOP_MNEMONIC(lahf, "lahf");
12104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12105 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
12106 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
12107 return IEMOP_RAISE_INVALID_OPCODE();
12108 IEM_MC_BEGIN(0, 1);
12109 IEM_MC_LOCAL(uint8_t, u8Flags);
12110 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
12111 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
12112 IEM_MC_ADVANCE_RIP();
12113 IEM_MC_END();
12114 return VINF_SUCCESS;
12115}
12116
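/*
 * Documentation-only sketch (hypothetical helper) of the SAHF merge above,
 * using the architectural EFLAGS layout: CF=bit 0, always-one=bit 1, PF=2,
 * AF=4, ZF=6, SF=7.
 */
#if 0
static uint32_t iemSketchSahf(uint32_t fEFlags, uint8_t bAh)
{
    uint32_t fFromAh = bAh & UINT32_C(0xd5);        /* SF|ZF|AF|PF|CF */
    fFromAh |= UINT32_C(0x02);                      /* bit 1 is always one */
    return (fEFlags & ~UINT32_C(0xff)) | fFromAh;   /* replace the low byte */
}
#endif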
12117
12118/**
12119 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
12120 * iemOp_mov_Ov_rAX to fetch the moffsXX part of the instruction and fend off
12121 * lock prefixes. Will return on failures.
12122 * @param a_GCPtrMemOff The variable to store the offset in.
12123 */
12124#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
12125 do \
12126 { \
12127 switch (pVCpu->iem.s.enmEffAddrMode) \
12128 { \
12129 case IEMMODE_16BIT: \
12130 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
12131 break; \
12132 case IEMMODE_32BIT: \
12133 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
12134 break; \
12135 case IEMMODE_64BIT: \
12136 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
12137 break; \
12138 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
12139 } \
12140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12141 } while (0)
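
/*
 * Example (informative): in 32-bit code the bytes "a1 44 33 22 11" decode as
 * "mov eax, [11223344h]" - a moffs operand is a plain absolute offset read
 * straight from the instruction stream, with no ModR/M byte, which is what
 * the macro above implements for the three effective address sizes.
 */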
12142
12143/** Opcode 0xa0. */
12144FNIEMOP_DEF(iemOp_mov_Al_Ob)
12145{
12146 /*
12147 * Get the offset and fend off lock prefixes.
12148 */
12149 RTGCPTR GCPtrMemOff;
12150 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12151
12152 /*
12153 * Fetch AL.
12154 */
12155 IEM_MC_BEGIN(0,1);
12156 IEM_MC_LOCAL(uint8_t, u8Tmp);
12157 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12158 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
12159 IEM_MC_ADVANCE_RIP();
12160 IEM_MC_END();
12161 return VINF_SUCCESS;
12162}
12163
12164
12165/** Opcode 0xa1. */
12166FNIEMOP_DEF(iemOp_mov_rAX_Ov)
12167{
12168 /*
12169 * Get the offset and fend off lock prefixes.
12170 */
12171 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
12172 RTGCPTR GCPtrMemOff;
12173 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12174
12175 /*
12176 * Fetch rAX.
12177 */
12178 switch (pVCpu->iem.s.enmEffOpSize)
12179 {
12180 case IEMMODE_16BIT:
12181 IEM_MC_BEGIN(0,1);
12182 IEM_MC_LOCAL(uint16_t, u16Tmp);
12183 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12184 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
12185 IEM_MC_ADVANCE_RIP();
12186 IEM_MC_END();
12187 return VINF_SUCCESS;
12188
12189 case IEMMODE_32BIT:
12190 IEM_MC_BEGIN(0,1);
12191 IEM_MC_LOCAL(uint32_t, u32Tmp);
12192 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12193 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
12194 IEM_MC_ADVANCE_RIP();
12195 IEM_MC_END();
12196 return VINF_SUCCESS;
12197
12198 case IEMMODE_64BIT:
12199 IEM_MC_BEGIN(0,1);
12200 IEM_MC_LOCAL(uint64_t, u64Tmp);
12201 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12202 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
12203 IEM_MC_ADVANCE_RIP();
12204 IEM_MC_END();
12205 return VINF_SUCCESS;
12206
12207 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12208 }
12209}
12210
12211
12212/** Opcode 0xa2. */
12213FNIEMOP_DEF(iemOp_mov_Ob_AL)
12214{
12215 /*
12216 * Get the offset and fend off lock prefixes.
12217 */
12218 RTGCPTR GCPtrMemOff;
12219 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12220
12221 /*
12222 * Store AL.
12223 */
12224 IEM_MC_BEGIN(0,1);
12225 IEM_MC_LOCAL(uint8_t, u8Tmp);
12226 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
12227 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
12228 IEM_MC_ADVANCE_RIP();
12229 IEM_MC_END();
12230 return VINF_SUCCESS;
12231}
12232
12233
12234/** Opcode 0xa3. */
12235FNIEMOP_DEF(iemOp_mov_Ov_rAX)
12236{
12237 /*
12238 * Get the offset and fend off lock prefixes.
12239 */
12240 RTGCPTR GCPtrMemOff;
12241 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12242
12243 /*
12244 * Store rAX.
12245 */
12246 switch (pVCpu->iem.s.enmEffOpSize)
12247 {
12248 case IEMMODE_16BIT:
12249 IEM_MC_BEGIN(0,1);
12250 IEM_MC_LOCAL(uint16_t, u16Tmp);
12251 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
12252 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
12253 IEM_MC_ADVANCE_RIP();
12254 IEM_MC_END();
12255 return VINF_SUCCESS;
12256
12257 case IEMMODE_32BIT:
12258 IEM_MC_BEGIN(0,1);
12259 IEM_MC_LOCAL(uint32_t, u32Tmp);
12260 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
12261 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
12262 IEM_MC_ADVANCE_RIP();
12263 IEM_MC_END();
12264 return VINF_SUCCESS;
12265
12266 case IEMMODE_64BIT:
12267 IEM_MC_BEGIN(0,1);
12268 IEM_MC_LOCAL(uint64_t, u64Tmp);
12269 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
12270 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
12271 IEM_MC_ADVANCE_RIP();
12272 IEM_MC_END();
12273 return VINF_SUCCESS;
12274
12275 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12276 }
12277}
12278
12279/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
12280#define IEM_MOVS_CASE(ValBits, AddrBits) \
12281 IEM_MC_BEGIN(0, 2); \
12282 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12283 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12284 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12285 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
12286 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12287 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
12288 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12289 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12290 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12291 } IEM_MC_ELSE() { \
12292 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12293 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12294 } IEM_MC_ENDIF(); \
12295 IEM_MC_ADVANCE_RIP(); \
12296 IEM_MC_END();
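
/*
 * Documentation-only sketch (hypothetical helper): the post-access pointer
 * update shared by this and the later IEM_*_CASE string macros - rSI/rDI
 * step by the operand size, downwards when EFLAGS.DF is set, else upwards.
 */
#if 0
static uint64_t iemSketchStrPtrStep(uint64_t uPtr, uint32_t fEFlags, uint8_t cbOp)
{
    return (fEFlags & X86_EFL_DF) ? uPtr - cbOp : uPtr + cbOp;
}
#endif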
12297
12298/** Opcode 0xa4. */
12299FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
12300{
12301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12302
12303 /*
12304 * Use the C implementation if a repeat prefix is encountered.
12305 */
12306 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12307 {
12308 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
12309 switch (pVCpu->iem.s.enmEffAddrMode)
12310 {
12311 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
12312 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
12313 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
12314 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12315 }
12316 }
12317 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
12318
12319 /*
12320 * Sharing case implementation with movs[wdq] below.
12321 */
12322 switch (pVCpu->iem.s.enmEffAddrMode)
12323 {
12324 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
12325 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
12326 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
12327 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12328 }
12329 return VINF_SUCCESS;
12330}
12331
12332
12333/** Opcode 0xa5. */
12334FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
12335{
12336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12337
12338 /*
12339 * Use the C implementation if a repeat prefix is encountered.
12340 */
12341 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12342 {
12343 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
12344 switch (pVCpu->iem.s.enmEffOpSize)
12345 {
12346 case IEMMODE_16BIT:
12347 switch (pVCpu->iem.s.enmEffAddrMode)
12348 {
12349 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
12350 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
12351 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
12352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12353 }
12354 break;
12355 case IEMMODE_32BIT:
12356 switch (pVCpu->iem.s.enmEffAddrMode)
12357 {
12358 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
12359 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
12360 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
12361 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12362 }
12363 case IEMMODE_64BIT:
12364 switch (pVCpu->iem.s.enmEffAddrMode)
12365 {
12366 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
12367 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
12368 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
12369 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12370 }
12371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12372 }
12373 }
12374 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
12375
12376 /*
12377 * Annoying double switch here.
12378 * Using ugly macro for implementing the cases, sharing it with movsb.
12379 */
12380 switch (pVCpu->iem.s.enmEffOpSize)
12381 {
12382 case IEMMODE_16BIT:
12383 switch (pVCpu->iem.s.enmEffAddrMode)
12384 {
12385 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
12386 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
12387 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
12388 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12389 }
12390 break;
12391
12392 case IEMMODE_32BIT:
12393 switch (pVCpu->iem.s.enmEffAddrMode)
12394 {
12395 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
12396 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
12397 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
12398 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12399 }
12400 break;
12401
12402 case IEMMODE_64BIT:
12403 switch (pVCpu->iem.s.enmEffAddrMode)
12404 {
12405 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12406 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
12407 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
12408 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12409 }
12410 break;
12411 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12412 }
12413 return VINF_SUCCESS;
12414}
12415
12416#undef IEM_MOVS_CASE
12417
12418/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
12419#define IEM_CMPS_CASE(ValBits, AddrBits) \
12420 IEM_MC_BEGIN(3, 3); \
12421 IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
12422 IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
12423 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
12424 IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
12425 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12426 \
12427 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12428 IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
12429 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12430 IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
12431 IEM_MC_REF_LOCAL(puValue1, uValue1); \
12432 IEM_MC_REF_EFLAGS(pEFlags); \
12433 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
12434 \
12435 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12436 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12437 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12438 } IEM_MC_ELSE() { \
12439 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12440 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12441 } IEM_MC_ENDIF(); \
12442 IEM_MC_ADVANCE_RIP(); \
12443 IEM_MC_END(); \
12444
12445/** Opcode 0xa6. */
12446FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
12447{
12448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12449
12450 /*
12451 * Use the C implementation if a repeat prefix is encountered.
12452 */
12453 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12454 {
12455 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
12456 switch (pVCpu->iem.s.enmEffAddrMode)
12457 {
12458 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
12459 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
12460 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
12461 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12462 }
12463 }
12464 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12465 {
12466 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
12467 switch (pVCpu->iem.s.enmEffAddrMode)
12468 {
12469 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
12470 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
12471 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
12472 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12473 }
12474 }
12475 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
12476
12477 /*
12478 * Sharing case implementation with cmps[wdq] below.
12479 */
12480 switch (pVCpu->iem.s.enmEffAddrMode)
12481 {
12482 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
12483 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
12484 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
12485 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12486 }
12487 return VINF_SUCCESS;
12489}
12490
12491
12492/** Opcode 0xa7. */
12493FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
12494{
12495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12496
12497 /*
12498 * Use the C implementation if a repeat prefix is encountered.
12499 */
12500 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12501 {
12502 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
12503 switch (pVCpu->iem.s.enmEffOpSize)
12504 {
12505 case IEMMODE_16BIT:
12506 switch (pVCpu->iem.s.enmEffAddrMode)
12507 {
12508 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12509 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12510 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12511 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12512 }
12513 break;
12514 case IEMMODE_32BIT:
12515 switch (pVCpu->iem.s.enmEffAddrMode)
12516 {
12517 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12518 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12519 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12521 }
12522 case IEMMODE_64BIT:
12523 switch (pVCpu->iem.s.enmEffAddrMode)
12524 {
12525 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
12526 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12527 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12528 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12529 }
12530 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12531 }
12532 }
12533
12534 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12535 {
12536 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
12537 switch (pVCpu->iem.s.enmEffOpSize)
12538 {
12539 case IEMMODE_16BIT:
12540 switch (pVCpu->iem.s.enmEffAddrMode)
12541 {
12542 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12543 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12544 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12545 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12546 }
12547 break;
12548 case IEMMODE_32BIT:
12549 switch (pVCpu->iem.s.enmEffAddrMode)
12550 {
12551 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12552 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12553 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12554 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12555 }
12556 case IEMMODE_64BIT:
12557 switch (pVCpu->iem.s.enmEffAddrMode)
12558 {
12559 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
12560 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12561 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12563 }
12564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12565 }
12566 }
12567
12568 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
12569
12570 /*
12571 * Annoying double switch here.
12572 * Using ugly macro for implementing the cases, sharing it with cmpsb.
12573 */
12574 switch (pVCpu->iem.s.enmEffOpSize)
12575 {
12576 case IEMMODE_16BIT:
12577 switch (pVCpu->iem.s.enmEffAddrMode)
12578 {
12579 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
12580 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
12581 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
12582 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12583 }
12584 break;
12585
12586 case IEMMODE_32BIT:
12587 switch (pVCpu->iem.s.enmEffAddrMode)
12588 {
12589 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
12590 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
12591 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
12592 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12593 }
12594 break;
12595
12596 case IEMMODE_64BIT:
12597 switch (pVCpu->iem.s.enmEffAddrMode)
12598 {
12599 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12600 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
12601 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
12602 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12603 }
12604 break;
12605 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12606 }
12607 return VINF_SUCCESS;
12609}
12610
12611#undef IEM_CMPS_CASE
12612
12613/** Opcode 0xa8. */
12614FNIEMOP_DEF(iemOp_test_AL_Ib)
12615{
12616 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
12617 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12618 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
12619}
12620
12621
12622/** Opcode 0xa9. */
12623FNIEMOP_DEF(iemOp_test_eAX_Iz)
12624{
12625 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
12626 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12627 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
12628}
12629
12630
12631/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
12632#define IEM_STOS_CASE(ValBits, AddrBits) \
12633 IEM_MC_BEGIN(0, 2); \
12634 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12635 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12636 IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
12637 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12638 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
12639 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12640 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12641 } IEM_MC_ELSE() { \
12642 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12643 } IEM_MC_ENDIF(); \
12644 IEM_MC_ADVANCE_RIP(); \
12645 IEM_MC_END(); \
12646
12647/** Opcode 0xaa. */
12648FNIEMOP_DEF(iemOp_stosb_Yb_AL)
12649{
12650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12651
12652 /*
12653 * Use the C implementation if a repeat prefix is encountered.
12654 */
12655 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12656 {
12657 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
12658 switch (pVCpu->iem.s.enmEffAddrMode)
12659 {
12660 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
12661 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
12662 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
12663 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12664 }
12665 }
12666 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
12667
12668 /*
12669 * Sharing case implementation with stos[wdq] below.
12670 */
12671 switch (pVCpu->iem.s.enmEffAddrMode)
12672 {
12673 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
12674 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
12675 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
12676 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12677 }
12678 return VINF_SUCCESS;
12679}
12680
12681
12682/** Opcode 0xab. */
12683FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
12684{
12685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12686
12687 /*
12688 * Use the C implementation if a repeat prefix is encountered.
12689 */
12690 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12691 {
12692 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
12693 switch (pVCpu->iem.s.enmEffOpSize)
12694 {
12695 case IEMMODE_16BIT:
12696 switch (pVCpu->iem.s.enmEffAddrMode)
12697 {
12698 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
12699 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
12700 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
12701 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12702 }
12703 break;
12704 case IEMMODE_32BIT:
12705 switch (pVCpu->iem.s.enmEffAddrMode)
12706 {
12707 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
12708 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
12709 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
12710 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12711 }
12712 case IEMMODE_64BIT:
12713 switch (pVCpu->iem.s.enmEffAddrMode)
12714 {
12715 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
12716 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
12717 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
12718 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12719 }
12720 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12721 }
12722 }
12723 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
12724
12725 /*
12726 * Annoying double switch here.
12727 * Using ugly macro for implementing the cases, sharing it with stosb.
12728 */
12729 switch (pVCpu->iem.s.enmEffOpSize)
12730 {
12731 case IEMMODE_16BIT:
12732 switch (pVCpu->iem.s.enmEffAddrMode)
12733 {
12734 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
12735 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
12736 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
12737 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12738 }
12739 break;
12740
12741 case IEMMODE_32BIT:
12742 switch (pVCpu->iem.s.enmEffAddrMode)
12743 {
12744 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
12745 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
12746 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
12747 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12748 }
12749 break;
12750
12751 case IEMMODE_64BIT:
12752 switch (pVCpu->iem.s.enmEffAddrMode)
12753 {
12754 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12755 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
12756 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
12757 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12758 }
12759 break;
12760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12761 }
12762 return VINF_SUCCESS;
12763}
12764
12765#undef IEM_STOS_CASE
12766
12767/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
12768#define IEM_LODS_CASE(ValBits, AddrBits) \
12769 IEM_MC_BEGIN(0, 2); \
12770 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12771 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12772 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12773 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
12774 IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
12775 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12776 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12777 } IEM_MC_ELSE() { \
12778 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12779 } IEM_MC_ENDIF(); \
12780 IEM_MC_ADVANCE_RIP(); \
12781 IEM_MC_END();
12782
12783/** Opcode 0xac. */
12784FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
12785{
12786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12787
12788 /*
12789 * Use the C implementation if a repeat prefix is encountered.
12790 */
12791 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12792 {
12793 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
12794 switch (pVCpu->iem.s.enmEffAddrMode)
12795 {
12796 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
12797 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
12798 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
12799 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12800 }
12801 }
12802 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
12803
12804 /*
12805 * Sharing case implementation with lods[wdq] below.
12806 */
12807 switch (pVCpu->iem.s.enmEffAddrMode)
12808 {
12809 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
12810 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
12811 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
12812 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12813 }
12814 return VINF_SUCCESS;
12815}
12816
12817
12818/** Opcode 0xad. */
12819FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
12820{
12821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12822
12823 /*
12824 * Use the C implementation if a repeat prefix is encountered.
12825 */
12826 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12827 {
12828 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
12829 switch (pVCpu->iem.s.enmEffOpSize)
12830 {
12831 case IEMMODE_16BIT:
12832 switch (pVCpu->iem.s.enmEffAddrMode)
12833 {
12834 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
12835 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
12836 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
12837 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12838 }
12839 break;
12840 case IEMMODE_32BIT:
12841 switch (pVCpu->iem.s.enmEffAddrMode)
12842 {
12843 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
12844 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
12845 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
12846 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12847 }
12848 case IEMMODE_64BIT:
12849 switch (pVCpu->iem.s.enmEffAddrMode)
12850 {
12851 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
12852 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
12853 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
12854 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12855 }
12856 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12857 }
12858 }
12859 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
12860
12861 /*
12862 * Annoying double switch here.
12863 * Using ugly macro for implementing the cases, sharing it with lodsb.
12864 */
12865 switch (pVCpu->iem.s.enmEffOpSize)
12866 {
12867 case IEMMODE_16BIT:
12868 switch (pVCpu->iem.s.enmEffAddrMode)
12869 {
12870 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
12871 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
12872 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
12873 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12874 }
12875 break;
12876
12877 case IEMMODE_32BIT:
12878 switch (pVCpu->iem.s.enmEffAddrMode)
12879 {
12880 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
12881 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
12882 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
12883 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12884 }
12885 break;
12886
12887 case IEMMODE_64BIT:
12888 switch (pVCpu->iem.s.enmEffAddrMode)
12889 {
12890 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12891 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
12892 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
12893 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12894 }
12895 break;
12896 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12897 }
12898 return VINF_SUCCESS;
12899}
12900
12901#undef IEM_LODS_CASE
12902
12903/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
12904#define IEM_SCAS_CASE(ValBits, AddrBits) \
12905 IEM_MC_BEGIN(3, 2); \
12906 IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
12907 IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
12908 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
12909 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12910 \
12911 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12912 IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
12913 IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
12914 IEM_MC_REF_EFLAGS(pEFlags); \
12915 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
12916 \
12917 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12918 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12919 } IEM_MC_ELSE() { \
12920 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12921 } IEM_MC_ENDIF(); \
12922 IEM_MC_ADVANCE_RIP(); \
12923 IEM_MC_END();
12924
12925/** Opcode 0xae. */
12926FNIEMOP_DEF(iemOp_scasb_AL_Xb)
12927{
12928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12929
12930 /*
12931 * Use the C implementation if a repeat prefix is encountered.
12932 */
12933 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12934 {
12935 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
12936 switch (pVCpu->iem.s.enmEffAddrMode)
12937 {
12938 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
12939 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
12940 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
12941 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12942 }
12943 }
12944 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12945 {
12946 IEMOP_MNEMONIC(repne_scasb_AL_Xb, "repne scasb AL,Xb");
12947 switch (pVCpu->iem.s.enmEffAddrMode)
12948 {
12949 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
12950 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
12951 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
12952 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12953 }
12954 }
12955 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
12956
12957 /*
12958 * Sharing case implementation with scas[wdq] below.
12959 */
12960 switch (pVCpu->iem.s.enmEffAddrMode)
12961 {
12962 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
12963 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
12964 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
12965 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12966 }
12967 return VINF_SUCCESS;
12968}
12969
12970
12971/** Opcode 0xaf. */
12972FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
12973{
12974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12975
12976 /*
12977 * Use the C implementation if a repeat prefix is encountered.
12978 */
12979 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12980 {
12981 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
12982 switch (pVCpu->iem.s.enmEffOpSize)
12983 {
12984 case IEMMODE_16BIT:
12985 switch (pVCpu->iem.s.enmEffAddrMode)
12986 {
12987 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
12988 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
12989 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
12990 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12991 }
12992 break;
12993 case IEMMODE_32BIT:
12994 switch (pVCpu->iem.s.enmEffAddrMode)
12995 {
12996 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
12997 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
12998 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
12999 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13000 }
13001 case IEMMODE_64BIT:
13002 switch (pVCpu->iem.s.enmEffAddrMode)
13003 {
13004 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? We can do 32-bit addressing in 64-bit mode, but not 16-bit, right? */
13005 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
13006 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
13007 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13008 }
13009 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13010 }
13011 }
13012 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
13013 {
13014 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
13015 switch (pVCpu->iem.s.enmEffOpSize)
13016 {
13017 case IEMMODE_16BIT:
13018 switch (pVCpu->iem.s.enmEffAddrMode)
13019 {
13020 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
13021 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
13022 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
13023 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13024 }
13025 break;
13026 case IEMMODE_32BIT:
13027 switch (pVCpu->iem.s.enmEffAddrMode)
13028 {
13029 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
13030 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
13031 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
13032 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13033 }
13034 case IEMMODE_64BIT:
13035 switch (pVCpu->iem.s.enmEffAddrMode)
13036 {
13037 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
13038 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
13039 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
13040 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13041 }
13042 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13043 }
13044 }
13045 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
13046
13047 /*
13048 * Annoying double switch here.
13049 * Using ugly macro for implementing the cases, sharing it with scasb.
13050 */
13051 switch (pVCpu->iem.s.enmEffOpSize)
13052 {
13053 case IEMMODE_16BIT:
13054 switch (pVCpu->iem.s.enmEffAddrMode)
13055 {
13056 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
13057 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
13058 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
13059 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13060 }
13061 break;
13062
13063 case IEMMODE_32BIT:
13064 switch (pVCpu->iem.s.enmEffAddrMode)
13065 {
13066 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
13067 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
13068 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
13069 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13070 }
13071 break;
13072
13073 case IEMMODE_64BIT:
13074 switch (pVCpu->iem.s.enmEffAddrMode)
13075 {
13076 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
13077 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
13078 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
13079 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13080 }
13081 break;
13082 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13083 }
13084 return VINF_SUCCESS;
13085}
13086
13087#undef IEM_SCAS_CASE
13088
13089/**
13090 * Common 'mov r8, imm8' helper.
13091 */
13092FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
13093{
13094 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13096
13097 IEM_MC_BEGIN(0, 1);
13098 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
13099 IEM_MC_STORE_GREG_U8(iReg, u8Value);
13100 IEM_MC_ADVANCE_RIP();
13101 IEM_MC_END();
13102
13103 return VINF_SUCCESS;
13104}
13105
13106
13107/** Opcode 0xb0. */
13108FNIEMOP_DEF(iemOp_mov_AL_Ib)
13109{
13110 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
13111 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
13112}
13113
13114
13115/** Opcode 0xb1. */
13116FNIEMOP_DEF(iemOp_CL_Ib)
13117{
13118 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
13119 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13120}
13121
13122
13123/** Opcode 0xb2. */
13124FNIEMOP_DEF(iemOp_DL_Ib)
13125{
13126 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
13127 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13128}
13129
13130
13131/** Opcode 0xb3. */
13132FNIEMOP_DEF(iemOp_BL_Ib)
13133{
13134 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
13135 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13136}
13137
13138
13139/** Opcode 0xb4. */
13140FNIEMOP_DEF(iemOp_mov_AH_Ib)
13141{
13142 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
13143 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13144}
13145
13146
13147/** Opcode 0xb5. */
13148FNIEMOP_DEF(iemOp_CH_Ib)
13149{
13150 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
13151 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13152}
13153
13154
13155/** Opcode 0xb6. */
13156FNIEMOP_DEF(iemOp_DH_Ib)
13157{
13158 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
13159 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13160}
13161
13162
13163/** Opcode 0xb7. */
13164FNIEMOP_DEF(iemOp_BH_Ib)
13165{
13166 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
13167 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13168}
13169
13170
13171/**
13172 * Common 'mov regX,immX' helper.
13173 */
13174FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
13175{
13176 switch (pVCpu->iem.s.enmEffOpSize)
13177 {
13178 case IEMMODE_16BIT:
13179 {
13180 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13182
13183 IEM_MC_BEGIN(0, 1);
13184 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
13185 IEM_MC_STORE_GREG_U16(iReg, u16Value);
13186 IEM_MC_ADVANCE_RIP();
13187 IEM_MC_END();
13188 break;
13189 }
13190
13191 case IEMMODE_32BIT:
13192 {
13193 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13195
13196 IEM_MC_BEGIN(0, 1);
13197 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
13198 IEM_MC_STORE_GREG_U32(iReg, u32Value);
13199 IEM_MC_ADVANCE_RIP();
13200 IEM_MC_END();
13201 break;
13202 }
13203 case IEMMODE_64BIT:
13204 {
13205 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
13206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13207
13208 IEM_MC_BEGIN(0, 1);
13209 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
13210 IEM_MC_STORE_GREG_U64(iReg, u64Value);
13211 IEM_MC_ADVANCE_RIP();
13212 IEM_MC_END();
13213 break;
13214 }
13215 }
13216
13217 return VINF_SUCCESS;
13218}
13219
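/*
 * Example (informative): with REX.W this is the only spot in the one-byte
 * map that takes a full 8-byte immediate, e.g. the bytes
 * "48 b8 88 77 66 55 44 33 22 11" decode as "mov rax, 1122334455667788h".
 */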
13220
13221/** Opcode 0xb8. */
13222FNIEMOP_DEF(iemOp_eAX_Iv)
13223{
13224 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
13225 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
13226}
13227
13228
13229/** Opcode 0xb9. */
13230FNIEMOP_DEF(iemOp_eCX_Iv)
13231{
13232 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
13233 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13234}
13235
13236
13237/** Opcode 0xba. */
13238FNIEMOP_DEF(iemOp_eDX_Iv)
13239{
13240 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
13241 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13242}
13243
13244
13245/** Opcode 0xbb. */
13246FNIEMOP_DEF(iemOp_eBX_Iv)
13247{
13248 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
13249 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13250}
13251
13252
13253/** Opcode 0xbc. */
13254FNIEMOP_DEF(iemOp_eSP_Iv)
13255{
13256 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
13257 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13258}
13259
13260
13261/** Opcode 0xbd. */
13262FNIEMOP_DEF(iemOp_eBP_Iv)
13263{
13264 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
13265 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13266}
13267
13268
13269/** Opcode 0xbe. */
13270FNIEMOP_DEF(iemOp_eSI_Iv)
13271{
13272 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
13273 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13274}
13275
13276
13277/** Opcode 0xbf. */
13278FNIEMOP_DEF(iemOp_eDI_Iv)
13279{
13280 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
13281 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13282}
13283
13284
13285/** Opcode 0xc0. */
13286FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
13287{
13288 IEMOP_HLP_MIN_186();
13289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13290 PCIEMOPSHIFTSIZES pImpl;
13291 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13292 {
13293 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
13294 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
13295 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
13296 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
13297 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
13298 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
13299 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
13300 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13301 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* keeps gcc quiet */
13302 }
13303 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13304
13305 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13306 {
13307 /* register */
13308 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13310 IEM_MC_BEGIN(3, 0);
13311 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13312 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13313 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13314 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13315 IEM_MC_REF_EFLAGS(pEFlags);
13316 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13317 IEM_MC_ADVANCE_RIP();
13318 IEM_MC_END();
13319 }
13320 else
13321 {
13322 /* memory */
13323 IEM_MC_BEGIN(3, 2);
13324 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13325 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13326 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13328
13329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13330 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13331 IEM_MC_ASSIGN(cShiftArg, cShift);
13332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13333 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13334 IEM_MC_FETCH_EFLAGS(EFlags);
13335 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13336
13337 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13338 IEM_MC_COMMIT_EFLAGS(EFlags);
13339 IEM_MC_ADVANCE_RIP();
13340 IEM_MC_END();
13341 }
13342 return VINF_SUCCESS;
13343}
13344
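/*
 * Informative note: the CPU masks the shift count to 5 bits (6 bits for
 * 64-bit operands) before use, so e.g. "c0 e0 21" (shl al, 21h) shifts AL
 * left by one.  The decoder above passes the raw immediate through and
 * leaves that masking to the assembly workers (pfnNormalU8 and friends).
 */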
13345
13346/** Opcode 0xc1. */
13347FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
13348{
13349 IEMOP_HLP_MIN_186();
13350 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13351 PCIEMOPSHIFTSIZES pImpl;
13352 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13353 {
13354 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
13355 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
13356 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
13357 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
13358 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
13359 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
13360 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
13361 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13362 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* keeps gcc quiet */
13363 }
13364 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13365
13366 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13367 {
13368 /* register */
13369 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13371 switch (pVCpu->iem.s.enmEffOpSize)
13372 {
13373 case IEMMODE_16BIT:
13374 IEM_MC_BEGIN(3, 0);
13375 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13376 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13377 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13378 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13379 IEM_MC_REF_EFLAGS(pEFlags);
13380 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13381 IEM_MC_ADVANCE_RIP();
13382 IEM_MC_END();
13383 return VINF_SUCCESS;
13384
13385 case IEMMODE_32BIT:
13386 IEM_MC_BEGIN(3, 0);
13387 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13388 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13389 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13390 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13391 IEM_MC_REF_EFLAGS(pEFlags);
13392 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13393 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13394 IEM_MC_ADVANCE_RIP();
13395 IEM_MC_END();
13396 return VINF_SUCCESS;
13397
13398 case IEMMODE_64BIT:
13399 IEM_MC_BEGIN(3, 0);
13400 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13401 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13402 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13403 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13404 IEM_MC_REF_EFLAGS(pEFlags);
13405 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13406 IEM_MC_ADVANCE_RIP();
13407 IEM_MC_END();
13408 return VINF_SUCCESS;
13409
13410 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13411 }
13412 }
13413 else
13414 {
13415 /* memory */
13416 switch (pVCpu->iem.s.enmEffOpSize)
13417 {
13418 case IEMMODE_16BIT:
13419 IEM_MC_BEGIN(3, 2);
13420 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13421 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13422 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13424
13425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13426 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13427 IEM_MC_ASSIGN(cShiftArg, cShift);
13428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13429 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13430 IEM_MC_FETCH_EFLAGS(EFlags);
13431 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13432
13433 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13434 IEM_MC_COMMIT_EFLAGS(EFlags);
13435 IEM_MC_ADVANCE_RIP();
13436 IEM_MC_END();
13437 return VINF_SUCCESS;
13438
13439 case IEMMODE_32BIT:
13440 IEM_MC_BEGIN(3, 2);
13441 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13442 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13443 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13444 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13445
13446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13447 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13448 IEM_MC_ASSIGN(cShiftArg, cShift);
13449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13450 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13451 IEM_MC_FETCH_EFLAGS(EFlags);
13452 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13453
13454 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13455 IEM_MC_COMMIT_EFLAGS(EFlags);
13456 IEM_MC_ADVANCE_RIP();
13457 IEM_MC_END();
13458 return VINF_SUCCESS;
13459
13460 case IEMMODE_64BIT:
13461 IEM_MC_BEGIN(3, 2);
13462 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13463 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13464 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13466
13467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13468 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13469 IEM_MC_ASSIGN(cShiftArg, cShift);
13470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13471 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13472 IEM_MC_FETCH_EFLAGS(EFlags);
13473 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13474
13475 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13476 IEM_MC_COMMIT_EFLAGS(EFlags);
13477 IEM_MC_ADVANCE_RIP();
13478 IEM_MC_END();
13479 return VINF_SUCCESS;
13480
13481 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13482 }
13483 }
13484}
13485
13486
13487/** Opcode 0xc2. */
13488FNIEMOP_DEF(iemOp_retn_Iw)
13489{
13490 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
13491 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13493 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13494 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
13495}
13496
13497
13498/** Opcode 0xc3. */
13499FNIEMOP_DEF(iemOp_retn)
13500{
13501 IEMOP_MNEMONIC(retn, "retn");
13502 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13504 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
13505}
13506
13507
13508/** Opcode 0xc4. */
13509FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
13510{
13511 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13512 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
13513 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13514 {
13515 IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
13516 /* The LES instruction is invalid in 64-bit mode. In legacy and
13517 compatibility mode it is invalid with MOD=3.
13518 The use as a VEX prefix is made possible by assigning the inverted
13519 REX.R to the top MOD bit, and the top bit of the inverted register
13520 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
13521 code to accessing registers 0..7 in this VEX form. */
13522 /** @todo VEX: Just use new tables for it. */
13523 return IEMOP_RAISE_INVALID_OPCODE();
13524 }
13525 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
13526 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
13527}
13528
13529
13530/** Opcode 0xc5. */
13531FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
13532{
13533 /* The LDS instruction is invalid in 64-bit mode. In legacy and
13534 compatibility mode it is invalid with MOD=3.
13535 The use as a VEX prefix is made possible by assigning the inverted
13536 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
13537 outside of 64-bit mode. VEX is not available in real or v86 mode. */
13538 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13539 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
13540 {
13541 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
13542 {
13543 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
13544 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
13545 }
13546 IEMOP_HLP_NO_REAL_OR_V86_MODE();
13547 }
13548
13549 IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
13550 /** @todo Test when exactly the VEX conformance checks kick in during
13551 * instruction decoding and fetching (using \#PF). */
13552 uint8_t bVex1; IEM_OPCODE_GET_NEXT_U8(&bVex1);
13553 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
13554 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
13555#if 0 /* will make sense of this next week... */
13556 if ( !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
13557 &&
13558 )
13559 {
13560
13561 }
13562#endif
13563
13564 /** @todo VEX: Just use new tables for it. */
13565 return IEMOP_RAISE_INVALID_OPCODE();
13566}
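
/*
 * Illustrative sketch, not part of the build: field extraction for the two
 * VEX payload bytes (bVex1, bVex2) fetched above, following the three-byte
 * VEX layout in the Intel SDM.  Example-local names.
 */
#if 0
static void iemExampleDecodeVex3Payload(uint8_t bVex1, uint8_t bVex2)
{
    bool    const fRexR  = !(bVex1 & 0x80);         /* VEX.R, stored inverted. */
    bool    const fRexX  = !(bVex1 & 0x40);         /* VEX.X, stored inverted. */
    bool    const fRexB  = !(bVex1 & 0x20);         /* VEX.B, stored inverted. */
    uint8_t const uMmmmm = bVex1 & 0x1f;            /* opcode map: 1=0f, 2=0f38, 3=0f3a. */
    bool    const fRexW  = RT_BOOL(bVex2 & 0x80);   /* VEX.W / opcode extension. */
    uint8_t const uVvvv  = ((~bVex2) >> 3) & 0xf;   /* 2nd operand register, stored inverted. */
    bool    const fL     = RT_BOOL(bVex2 & 0x04);   /* vector length: 0=128-bit, 1=256-bit. */
    uint8_t const uPp    = bVex2 & 0x3;             /* implied prefix: 0=none, 1=66h, 2=f3h, 3=f2h. */
    NOREF(fRexR); NOREF(fRexX); NOREF(fRexB); NOREF(uMmmmm);
    NOREF(fRexW); NOREF(uVvvv); NOREF(fL); NOREF(uPp);
}
#endif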
13567
13568
13569/** Opcode 0xc6. */
13570FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
13571{
13572 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13573 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
13574 return IEMOP_RAISE_INVALID_OPCODE();
13575 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
13576
13577 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13578 {
13579 /* register access */
13580 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13582 IEM_MC_BEGIN(0, 0);
13583 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
13584 IEM_MC_ADVANCE_RIP();
13585 IEM_MC_END();
13586 }
13587 else
13588 {
13589 /* memory access. */
13590 IEM_MC_BEGIN(0, 1);
13591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13593 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13595 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
13596 IEM_MC_ADVANCE_RIP();
13597 IEM_MC_END();
13598 }
13599 return VINF_SUCCESS;
13600}
13601
13602
13603/** Opcode 0xc7. */
13604FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
13605{
13606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13607 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
13608 return IEMOP_RAISE_INVALID_OPCODE();
13609 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
13610
13611 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13612 {
13613 /* register access */
13614 switch (pVCpu->iem.s.enmEffOpSize)
13615 {
13616 case IEMMODE_16BIT:
13617 IEM_MC_BEGIN(0, 0);
13618 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13620 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
13621 IEM_MC_ADVANCE_RIP();
13622 IEM_MC_END();
13623 return VINF_SUCCESS;
13624
13625 case IEMMODE_32BIT:
13626 IEM_MC_BEGIN(0, 0);
13627 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13629 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
13630 IEM_MC_ADVANCE_RIP();
13631 IEM_MC_END();
13632 return VINF_SUCCESS;
13633
13634 case IEMMODE_64BIT:
13635 IEM_MC_BEGIN(0, 0);
13636 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13638 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
13639 IEM_MC_ADVANCE_RIP();
13640 IEM_MC_END();
13641 return VINF_SUCCESS;
13642
13643 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13644 }
13645 }
13646 else
13647 {
13648 /* memory access. */
13649 switch (pVCpu->iem.s.enmEffOpSize)
13650 {
13651 case IEMMODE_16BIT:
13652 IEM_MC_BEGIN(0, 1);
13653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
13655 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13657 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
13658 IEM_MC_ADVANCE_RIP();
13659 IEM_MC_END();
13660 return VINF_SUCCESS;
13661
13662 case IEMMODE_32BIT:
13663 IEM_MC_BEGIN(0, 1);
13664 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13666 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13668 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
13669 IEM_MC_ADVANCE_RIP();
13670 IEM_MC_END();
13671 return VINF_SUCCESS;
13672
13673 case IEMMODE_64BIT:
13674 IEM_MC_BEGIN(0, 1);
13675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13677 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13679 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
13680 IEM_MC_ADVANCE_RIP();
13681 IEM_MC_END();
13682 return VINF_SUCCESS;
13683
13684 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13685 }
13686 }
13687}
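
/*
 * A minimal sketch, not part of the build: why the 64-bit cases above use
 * IEM_OPCODE_GET_NEXT_S32_SX_U64 - mov Ev,Iz never takes a 64-bit immediate,
 * the 32-bit immediate is sign-extended to 64 bits instead.
 */
#if 0
static uint64_t iemExampleSignExtendIz(uint32_t u32Imm)
{
    /* 0x80000000 yields 0xffffffff80000000, 0x7fffffff yields 0x000000007fffffff. */
    return (uint64_t)(int64_t)(int32_t)u32Imm;
}
#endif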
13688
13689
13690
13691
13692/** Opcode 0xc8. */
13693FNIEMOP_DEF(iemOp_enter_Iw_Ib)
13694{
13695 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
13696 IEMOP_HLP_MIN_186();
13697 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13698 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
13699 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
13700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13701 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
13702}
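
/*
 * Simplified model, not part of the build, of what iemCImpl_enter does for a
 * 32-bit operand and stack size; all wrap-around and fault checks are omitted
 * and the stack is modelled as a flat, 4-byte aligned array.
 */
#if 0
static void iemExampleEnter32(uint32_t *puEsp, uint32_t *puEbp, uint32_t *pauStack,
                              uint16_t cbFrame, uint8_t u8NestingLevel)
{
    u8NestingLevel &= 0x1f;                             /* the nesting level operates modulo 32. */
    *puEsp -= 4; pauStack[*puEsp / 4] = *puEbp;         /* push ebp */
    uint32_t const uFrameTemp = *puEsp;
    for (uint8_t i = 1; i < u8NestingLevel; i++)        /* copy the enclosing frame pointers. */
    {
        *puEbp -= 4;
        *puEsp -= 4; pauStack[*puEsp / 4] = pauStack[*puEbp / 4];
    }
    if (u8NestingLevel)
    {
        *puEsp -= 4; pauStack[*puEsp / 4] = uFrameTemp; /* push the new frame pointer. */
    }
    *puEbp  = uFrameTemp;                               /* ebp addresses the new frame. */
    *puEsp -= cbFrame;                                  /* allocate the locals. */
}
#endif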
13703
13704
13705/** Opcode 0xc9. */
13706FNIEMOP_DEF(iemOp_leave)
13707{
13708 IEMOP_MNEMONIC(leave, "leave");
13709 IEMOP_HLP_MIN_186();
13710 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13712 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
13713}
13714
13715
13716/** Opcode 0xca. */
13717FNIEMOP_DEF(iemOp_retf_Iw)
13718{
13719 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
13720 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13722 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13723 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
13724}
13725
13726
13727/** Opcode 0xcb. */
13728FNIEMOP_DEF(iemOp_retf)
13729{
13730 IEMOP_MNEMONIC(retf, "retf");
13731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13732 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13733 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
13734}
13735
13736
13737/** Opcode 0xcc. */
13738FNIEMOP_DEF(iemOp_int_3)
13739{
13740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13741 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
13742}
13743
13744
13745/** Opcode 0xcd. */
13746FNIEMOP_DEF(iemOp_int_Ib)
13747{
13748 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
13749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13750 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
13751}
13752
13753
13754/** Opcode 0xce. */
13755FNIEMOP_DEF(iemOp_into)
13756{
13757 IEMOP_MNEMONIC(into, "into");
13758 IEMOP_HLP_NO_64BIT();
13759
13760 IEM_MC_BEGIN(2, 0);
13761 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
13762 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
13763 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
13764 IEM_MC_END();
13765 return VINF_SUCCESS;
13766}
13767
13768
13769/** Opcode 0xcf. */
13770FNIEMOP_DEF(iemOp_iret)
13771{
13772 IEMOP_MNEMONIC(iret, "iret");
13773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13774 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
13775}
13776
13777
13778/** Opcode 0xd0. */
13779FNIEMOP_DEF(iemOp_Grp2_Eb_1)
13780{
13781 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13782 PCIEMOPSHIFTSIZES pImpl;
13783 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13784 {
13785 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
13786 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
13787 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
13788 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
13789 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
13790 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
13791 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
13792 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13793 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc cannot tell the switch is exhaustive */
13794 }
13795 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13796
13797 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13798 {
13799 /* register */
13800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13801 IEM_MC_BEGIN(3, 0);
13802 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13803 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13804 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13805 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13806 IEM_MC_REF_EFLAGS(pEFlags);
13807 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13808 IEM_MC_ADVANCE_RIP();
13809 IEM_MC_END();
13810 }
13811 else
13812 {
13813 /* memory */
13814 IEM_MC_BEGIN(3, 2);
13815 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13816 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13817 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13819
13820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13822 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13823 IEM_MC_FETCH_EFLAGS(EFlags);
13824 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13825
13826 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13827 IEM_MC_COMMIT_EFLAGS(EFlags);
13828 IEM_MC_ADVANCE_RIP();
13829 IEM_MC_END();
13830 }
13831 return VINF_SUCCESS;
13832}
13833
13834
13835
13836/** Opcode 0xd1. */
13837FNIEMOP_DEF(iemOp_Grp2_Ev_1)
13838{
13839 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13840 PCIEMOPSHIFTSIZES pImpl;
13841 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13842 {
13843 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
13844 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
13845 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
13846 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
13847 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
13848 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
13849 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
13850 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13851 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc cannot tell the switch is exhaustive */
13852 }
13853 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13854
13855 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13856 {
13857 /* register */
13858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13859 switch (pVCpu->iem.s.enmEffOpSize)
13860 {
13861 case IEMMODE_16BIT:
13862 IEM_MC_BEGIN(3, 0);
13863 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13864 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13865 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13866 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13867 IEM_MC_REF_EFLAGS(pEFlags);
13868 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13869 IEM_MC_ADVANCE_RIP();
13870 IEM_MC_END();
13871 return VINF_SUCCESS;
13872
13873 case IEMMODE_32BIT:
13874 IEM_MC_BEGIN(3, 0);
13875 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13876 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13877 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13878 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13879 IEM_MC_REF_EFLAGS(pEFlags);
13880 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13881 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13882 IEM_MC_ADVANCE_RIP();
13883 IEM_MC_END();
13884 return VINF_SUCCESS;
13885
13886 case IEMMODE_64BIT:
13887 IEM_MC_BEGIN(3, 0);
13888 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13889 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13890 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13891 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13892 IEM_MC_REF_EFLAGS(pEFlags);
13893 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13894 IEM_MC_ADVANCE_RIP();
13895 IEM_MC_END();
13896 return VINF_SUCCESS;
13897
13898 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13899 }
13900 }
13901 else
13902 {
13903 /* memory */
13904 switch (pVCpu->iem.s.enmEffOpSize)
13905 {
13906 case IEMMODE_16BIT:
13907 IEM_MC_BEGIN(3, 2);
13908 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13909 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13910 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13911 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13912
13913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13915 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13916 IEM_MC_FETCH_EFLAGS(EFlags);
13917 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13918
13919 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13920 IEM_MC_COMMIT_EFLAGS(EFlags);
13921 IEM_MC_ADVANCE_RIP();
13922 IEM_MC_END();
13923 return VINF_SUCCESS;
13924
13925 case IEMMODE_32BIT:
13926 IEM_MC_BEGIN(3, 2);
13927 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13928 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13929 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13930 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13931
13932 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13934 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13935 IEM_MC_FETCH_EFLAGS(EFlags);
13936 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13937
13938 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13939 IEM_MC_COMMIT_EFLAGS(EFlags);
13940 IEM_MC_ADVANCE_RIP();
13941 IEM_MC_END();
13942 return VINF_SUCCESS;
13943
13944 case IEMMODE_64BIT:
13945 IEM_MC_BEGIN(3, 2);
13946 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13947 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13948 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13950
13951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13953 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13954 IEM_MC_FETCH_EFLAGS(EFlags);
13955 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13956
13957 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13958 IEM_MC_COMMIT_EFLAGS(EFlags);
13959 IEM_MC_ADVANCE_RIP();
13960 IEM_MC_END();
13961 return VINF_SUCCESS;
13962
13963 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13964 }
13965 }
13966}
13967
13968
13969/** Opcode 0xd2. */
13970FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
13971{
13972 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13973 PCIEMOPSHIFTSIZES pImpl;
13974 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13975 {
13976 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
13977 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
13978 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
13979 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
13980 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
13981 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
13982 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
13983 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13984 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
13985 }
13986 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13987
13988 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13989 {
13990 /* register */
13991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13992 IEM_MC_BEGIN(3, 0);
13993 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13994 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13995 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13996 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13997 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13998 IEM_MC_REF_EFLAGS(pEFlags);
13999 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
14000 IEM_MC_ADVANCE_RIP();
14001 IEM_MC_END();
14002 }
14003 else
14004 {
14005 /* memory */
14006 IEM_MC_BEGIN(3, 2);
14007 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
14008 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14009 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14010 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14011
14012 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14014 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14015 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14016 IEM_MC_FETCH_EFLAGS(EFlags);
14017 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
14018
14019 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
14020 IEM_MC_COMMIT_EFLAGS(EFlags);
14021 IEM_MC_ADVANCE_RIP();
14022 IEM_MC_END();
14023 }
14024 return VINF_SUCCESS;
14025}
14026
14027
14028/** Opcode 0xd3. */
14029FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
14030{
14031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14032 PCIEMOPSHIFTSIZES pImpl;
14033 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14034 {
14035 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
14036 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
14037 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
14038 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
14039 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
14040 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
14041 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
14042 case 6: return IEMOP_RAISE_INVALID_OPCODE();
14043 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc cannot tell the switch is exhaustive */
14044 }
14045 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
14046
14047 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14048 {
14049 /* register */
14050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14051 switch (pVCpu->iem.s.enmEffOpSize)
14052 {
14053 case IEMMODE_16BIT:
14054 IEM_MC_BEGIN(3, 0);
14055 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14056 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14057 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14058 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14059 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14060 IEM_MC_REF_EFLAGS(pEFlags);
14061 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
14062 IEM_MC_ADVANCE_RIP();
14063 IEM_MC_END();
14064 return VINF_SUCCESS;
14065
14066 case IEMMODE_32BIT:
14067 IEM_MC_BEGIN(3, 0);
14068 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14069 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14070 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14071 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14072 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14073 IEM_MC_REF_EFLAGS(pEFlags);
14074 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14075 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
14076 IEM_MC_ADVANCE_RIP();
14077 IEM_MC_END();
14078 return VINF_SUCCESS;
14079
14080 case IEMMODE_64BIT:
14081 IEM_MC_BEGIN(3, 0);
14082 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14083 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14084 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14085 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14086 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14087 IEM_MC_REF_EFLAGS(pEFlags);
14088 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14089 IEM_MC_ADVANCE_RIP();
14090 IEM_MC_END();
14091 return VINF_SUCCESS;
14092
14093 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14094 }
14095 }
14096 else
14097 {
14098 /* memory */
14099 switch (pVCpu->iem.s.enmEffOpSize)
14100 {
14101 case IEMMODE_16BIT:
14102 IEM_MC_BEGIN(3, 2);
14103 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14104 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14105 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14107
14108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14110 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14111 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14112 IEM_MC_FETCH_EFLAGS(EFlags);
14113 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
14114
14115 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
14116 IEM_MC_COMMIT_EFLAGS(EFlags);
14117 IEM_MC_ADVANCE_RIP();
14118 IEM_MC_END();
14119 return VINF_SUCCESS;
14120
14121 case IEMMODE_32BIT:
14122 IEM_MC_BEGIN(3, 2);
14123 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14124 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14125 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14127
14128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14130 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14131 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14132 IEM_MC_FETCH_EFLAGS(EFlags);
14133 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14134
14135 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
14136 IEM_MC_COMMIT_EFLAGS(EFlags);
14137 IEM_MC_ADVANCE_RIP();
14138 IEM_MC_END();
14139 return VINF_SUCCESS;
14140
14141 case IEMMODE_64BIT:
14142 IEM_MC_BEGIN(3, 2);
14143 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14144 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14145 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14146 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14147
14148 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14150 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14151 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14152 IEM_MC_FETCH_EFLAGS(EFlags);
14153 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14154
14155 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
14156 IEM_MC_COMMIT_EFLAGS(EFlags);
14157 IEM_MC_ADVANCE_RIP();
14158 IEM_MC_END();
14159 return VINF_SUCCESS;
14160
14161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14162 }
14163 }
14164}
14165
14166/** Opcode 0xd4. */
14167FNIEMOP_DEF(iemOp_aam_Ib)
14168{
14169 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
14170 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14172 IEMOP_HLP_NO_64BIT();
14173 if (!bImm)
14174 return IEMOP_RAISE_DIVIDE_ERROR();
14175 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
14176}
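
/*
 * A minimal sketch, not part of the build, of the arithmetic iemCImpl_aam
 * performs (SF/ZF/PF are set from the new AL; not shown here).
 */
#if 0
static void iemExampleAam(uint8_t *pbAl, uint8_t *pbAh, uint8_t bImm)
{
    uint8_t const bTmp = *pbAl;
    *pbAh = bTmp / bImm;    /* bImm == 0 was already rejected with #DE above. */
    *pbAl = bTmp % bImm;
}
#endif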
14177
14178
14179/** Opcode 0xd5. */
14180FNIEMOP_DEF(iemOp_aad_Ib)
14181{
14182 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
14183 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14185 IEMOP_HLP_NO_64BIT();
14186 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
14187}
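
/*
 * A minimal sketch, not part of the build, of the arithmetic iemCImpl_aad
 * performs (SF/ZF/PF are set from the new AL; not shown here).  Unlike AAM,
 * a zero immediate is harmless here, so no #DE check precedes the deferral.
 */
#if 0
static void iemExampleAad(uint8_t *pbAl, uint8_t *pbAh, uint8_t bImm)
{
    *pbAl = (uint8_t)(*pbAl + *pbAh * bImm);
    *pbAh = 0;
}
#endif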
14188
14189
14190/** Opcode 0xd6. */
14191FNIEMOP_DEF(iemOp_salc)
14192{
14193 IEMOP_MNEMONIC(salc, "salc");
14194 IEMOP_HLP_MIN_286(); /* (undocumented at the time) */
14196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14197 IEMOP_HLP_NO_64BIT();
14198
14199 IEM_MC_BEGIN(0, 0);
14200 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
14201 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
14202 } IEM_MC_ELSE() {
14203 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
14204 } IEM_MC_ENDIF();
14205 IEM_MC_ADVANCE_RIP();
14206 IEM_MC_END();
14207 return VINF_SUCCESS;
14208}
14209
14210
14211/** Opcode 0xd7. */
14212FNIEMOP_DEF(iemOp_xlat)
14213{
14214 IEMOP_MNEMONIC(xlat, "xlat");
14215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14216 switch (pVCpu->iem.s.enmEffAddrMode)
14217 {
14218 case IEMMODE_16BIT:
14219 IEM_MC_BEGIN(2, 0);
14220 IEM_MC_LOCAL(uint8_t, u8Tmp);
14221 IEM_MC_LOCAL(uint16_t, u16Addr);
14222 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
14223 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
14224 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
14225 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14226 IEM_MC_ADVANCE_RIP();
14227 IEM_MC_END();
14228 return VINF_SUCCESS;
14229
14230 case IEMMODE_32BIT:
14231 IEM_MC_BEGIN(2, 0);
14232 IEM_MC_LOCAL(uint8_t, u8Tmp);
14233 IEM_MC_LOCAL(uint32_t, u32Addr);
14234 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
14235 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
14236 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
14237 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14238 IEM_MC_ADVANCE_RIP();
14239 IEM_MC_END();
14240 return VINF_SUCCESS;
14241
14242 case IEMMODE_64BIT:
14243 IEM_MC_BEGIN(2, 0);
14244 IEM_MC_LOCAL(uint8_t, u8Tmp);
14245 IEM_MC_LOCAL(uint64_t, u64Addr);
14246 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
14247 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
14248 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
14249 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14250 IEM_MC_ADVANCE_RIP();
14251 IEM_MC_END();
14252 return VINF_SUCCESS;
14253
14254 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14255 }
14256}
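
/*
 * A minimal sketch, not part of the build: the three cases above all compute
 * AL = [seg:rBX + zero_extend(AL)], differing only in how far the effective
 * address is truncated.  16-bit model, assuming a flat view of the segment:
 */
#if 0
static uint8_t iemExampleXlat16(uint8_t const *pbSegBase, uint16_t u16Bx, uint8_t u8Al)
{
    return pbSegBase[(uint16_t)(u16Bx + u8Al)]; /* wraps at 64K, hence IEM_MC_FETCH_MEM16_U8. */
}
#endif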
14257
14258
14259/**
14260 * Common worker for FPU instructions working on ST0 and STn, and storing the
14261 * result in ST0.
14262 *
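 * @param bRm The ModRM byte; the R/M field gives STn.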
14263 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14264 */
14265FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14266{
14267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14268
14269 IEM_MC_BEGIN(3, 1);
14270 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14271 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14272 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14273 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14274
14275 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14276 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14277 IEM_MC_PREPARE_FPU_USAGE();
14278 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14279 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14280 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14281 IEM_MC_ELSE()
14282 IEM_MC_FPU_STACK_UNDERFLOW(0);
14283 IEM_MC_ENDIF();
14284 IEM_MC_ADVANCE_RIP();
14285
14286 IEM_MC_END();
14287 return VINF_SUCCESS;
14288}
14289
14290
14291/**
14292 * Common worker for FPU instructions working on ST0 and STn, and only affecting
14293 * flags.
14294 *
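 * @param bRm The ModRM byte; the R/M field gives STn.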
14295 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14296 */
14297FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14298{
14299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14300
14301 IEM_MC_BEGIN(3, 1);
14302 IEM_MC_LOCAL(uint16_t, u16Fsw);
14303 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14304 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14305 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14306
14307 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14308 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14309 IEM_MC_PREPARE_FPU_USAGE();
14310 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14311 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14312 IEM_MC_UPDATE_FSW(u16Fsw);
14313 IEM_MC_ELSE()
14314 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14315 IEM_MC_ENDIF();
14316 IEM_MC_ADVANCE_RIP();
14317
14318 IEM_MC_END();
14319 return VINF_SUCCESS;
14320}
14321
14322
14323/**
14324 * Common worker for FPU instructions working on ST0 and STn, only affecting
14325 * flags, and popping when done.
14326 *
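 * @param bRm The ModRM byte; the R/M field gives STn.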
14327 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14328 */
14329FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14330{
14331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14332
14333 IEM_MC_BEGIN(3, 1);
14334 IEM_MC_LOCAL(uint16_t, u16Fsw);
14335 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14336 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14337 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14338
14339 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14340 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14341 IEM_MC_PREPARE_FPU_USAGE();
14342 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14343 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14344 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14345 IEM_MC_ELSE()
14346 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
14347 IEM_MC_ENDIF();
14348 IEM_MC_ADVANCE_RIP();
14349
14350 IEM_MC_END();
14351 return VINF_SUCCESS;
14352}
14353
14354
14355/** Opcode 0xd8 11/0. */
14356FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
14357{
14358 IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
14359 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
14360}
14361
14362
14363/** Opcode 0xd8 11/1. */
14364FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
14365{
14366 IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
14367 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
14368}
14369
14370
14371/** Opcode 0xd8 11/2. */
14372FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
14373{
14374 IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
14375 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
14376}
14377
14378
14379/** Opcode 0xd8 11/3. */
14380FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
14381{
14382 IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
14383 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
14384}
14385
14386
14387/** Opcode 0xd8 11/4. */
14388FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
14389{
14390 IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
14391 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
14392}
14393
14394
14395/** Opcode 0xd8 11/5. */
14396FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
14397{
14398 IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
14399 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
14400}
14401
14402
14403/** Opcode 0xd8 11/6. */
14404FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
14405{
14406 IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
14407 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
14408}
14409
14410
14411/** Opcode 0xd8 11/7. */
14412FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
14413{
14414 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
14415 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
14416}
14417
14418
14419/**
14420 * Common worker for FPU instructions working on ST0 and an m32r, and storing
14421 * the result in ST0.
14422 *
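 * @param bRm The ModRM byte; encodes the m32r memory operand.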
14423 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14424 */
14425FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
14426{
14427 IEM_MC_BEGIN(3, 3);
14428 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14429 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14430 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14431 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14432 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14433 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14434
14435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14437
14438 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14439 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14440 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14441
14442 IEM_MC_PREPARE_FPU_USAGE();
14443 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14444 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
14445 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14446 IEM_MC_ELSE()
14447 IEM_MC_FPU_STACK_UNDERFLOW(0);
14448 IEM_MC_ENDIF();
14449 IEM_MC_ADVANCE_RIP();
14450
14451 IEM_MC_END();
14452 return VINF_SUCCESS;
14453}
14454
14455
14456/** Opcode 0xd8 !11/0. */
14457FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
14458{
14459 IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
14460 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
14461}
14462
14463
14464/** Opcode 0xd8 !11/1. */
14465FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
14466{
14467 IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
14468 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
14469}
14470
14471
14472/** Opcode 0xd8 !11/2. */
14473FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
14474{
14475 IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
14476
14477 IEM_MC_BEGIN(3, 3);
14478 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14479 IEM_MC_LOCAL(uint16_t, u16Fsw);
14480 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14481 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14482 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14483 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14484
14485 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14487
14488 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14489 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14490 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14491
14492 IEM_MC_PREPARE_FPU_USAGE();
14493 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14494 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
14495 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14496 IEM_MC_ELSE()
14497 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14498 IEM_MC_ENDIF();
14499 IEM_MC_ADVANCE_RIP();
14500
14501 IEM_MC_END();
14502 return VINF_SUCCESS;
14503}
14504
14505
14506/** Opcode 0xd8 !11/3. */
14507FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
14508{
14509 IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
14510
14511 IEM_MC_BEGIN(3, 3);
14512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14513 IEM_MC_LOCAL(uint16_t, u16Fsw);
14514 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14515 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14516 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14517 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14518
14519 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14521
14522 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14523 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14524 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14525
14526 IEM_MC_PREPARE_FPU_USAGE();
14527 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14528 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
14529 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14530 IEM_MC_ELSE()
14531 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14532 IEM_MC_ENDIF();
14533 IEM_MC_ADVANCE_RIP();
14534
14535 IEM_MC_END();
14536 return VINF_SUCCESS;
14537}
14538
14539
14540/** Opcode 0xd8 !11/4. */
14541FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
14542{
14543 IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
14544 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
14545}
14546
14547
14548/** Opcode 0xd8 !11/5. */
14549FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
14550{
14551 IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
14552 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
14553}
14554
14555
14556/** Opcode 0xd8 !11/6. */
14557FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
14558{
14559 IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
14560 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
14561}
14562
14563
14564/** Opcode 0xd8 !11/7. */
14565FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
14566{
14567 IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
14568 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
14569}
14570
14571
14572/** Opcode 0xd8. */
14573FNIEMOP_DEF(iemOp_EscF0)
14574{
14575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14576 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
14577
14578 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14579 {
14580 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14581 {
14582 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
14583 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
14584 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
14585 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
14586 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
14587 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
14588 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
14589 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
14590 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14591 }
14592 }
14593 else
14594 {
14595 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14596 {
14597 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
14598 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
14599 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
14600 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
14601 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
14602 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
14603 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
14604 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
14605 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14606 }
14607 }
14608}
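
/*
 * A minimal sketch, not part of the build, of how the 11-bit FPU opcode (FOP)
 * stored at the top of iemOp_EscF0 is composed: the low three bits of the
 * escape byte land in bits 8-10 and the ModRM byte in bits 0-7.
 */
#if 0
static uint16_t iemExampleFop(uint8_t bEscOpcode /* 0xd8..0xdf */, uint8_t bRm)
{
    return (uint16_t)(((bEscOpcode & 0x7) << 8) | bRm); /* == RT_MAKE_U16(bRm, bEscOpcode & 0x7) */
}
#endif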
14609
14610
14611/** Opcode 0xd9 /0 mem32real
14612 * @sa iemOp_fld_m64r */
14613FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
14614{
14615 IEMOP_MNEMONIC(fld_m32r, "fld m32r");
14616
14617 IEM_MC_BEGIN(2, 3);
14618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14619 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14620 IEM_MC_LOCAL(RTFLOAT32U, r32Val);
14621 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14622 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
14623
14624 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14626
14627 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14628 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14629 IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14630
14631 IEM_MC_PREPARE_FPU_USAGE();
14632 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14633 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
14634 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14635 IEM_MC_ELSE()
14636 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14637 IEM_MC_ENDIF();
14638 IEM_MC_ADVANCE_RIP();
14639
14640 IEM_MC_END();
14641 return VINF_SUCCESS;
14642}
14643
14644
14645/** Opcode 0xd9 !11/2 mem32real */
14646FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
14647{
14648 IEMOP_MNEMONIC(fst_m32r, "fst m32r");
14649 IEM_MC_BEGIN(3, 2);
14650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14651 IEM_MC_LOCAL(uint16_t, u16Fsw);
14652 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14653 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
14654 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14655
14656 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14658 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14659 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14660
14661 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14662 IEM_MC_PREPARE_FPU_USAGE();
14663 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14664 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
14665 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14666 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14667 IEM_MC_ELSE()
14668 IEM_MC_IF_FCW_IM()
14669 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
14670 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
14671 IEM_MC_ENDIF();
14672 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14673 IEM_MC_ENDIF();
14674 IEM_MC_ADVANCE_RIP();
14675
14676 IEM_MC_END();
14677 return VINF_SUCCESS;
14678}
14679
14680
14681/** Opcode 0xd9 !11/3 */
14682FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
14683{
14684 IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
14685 IEM_MC_BEGIN(3, 2);
14686 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14687 IEM_MC_LOCAL(uint16_t, u16Fsw);
14688 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14689 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
14690 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14691
14692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14694 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14695 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14696
14697 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14698 IEM_MC_PREPARE_FPU_USAGE();
14699 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14700 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
14701 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14702 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14703 IEM_MC_ELSE()
14704 IEM_MC_IF_FCW_IM()
14705 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
14706 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
14707 IEM_MC_ENDIF();
14708 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14709 IEM_MC_ENDIF();
14710 IEM_MC_ADVANCE_RIP();
14711
14712 IEM_MC_END();
14713 return VINF_SUCCESS;
14714}
14715
14716
14717/** Opcode 0xd9 !11/4 */
14718FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
14719{
14720 IEMOP_MNEMONIC(fldenv, "fldenv m14/m28byte");
14721 IEM_MC_BEGIN(3, 0);
14722 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14723 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14724 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
14725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14727 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14728 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14729 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14730 IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
14731 IEM_MC_END();
14732 return VINF_SUCCESS;
14733}
14734
14735
14736/** Opcode 0xd9 !11/5 */
14737FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
14738{
14739 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
14740 IEM_MC_BEGIN(1, 1);
14741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14742 IEM_MC_ARG(uint16_t, u16Fcw, 0);
14743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14745 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14746 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14747 IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14748 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fcw);
14749 IEM_MC_END();
14750 return VINF_SUCCESS;
14751}
14752
14753
14754/** Opcode 0xd9 !11/6 */
14755FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
14756{
14757 IEMOP_MNEMONIC(fnstenv, "fnstenv m14/m28byte");
14758 IEM_MC_BEGIN(3, 0);
14759 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14760 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14761 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
14762 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14764 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14765 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14766 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14767 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
14768 IEM_MC_END();
14769 return VINF_SUCCESS;
14770}
14771
14772
14773/** Opcode 0xd9 !11/7 */
14774FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
14775{
14776 IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
14777 IEM_MC_BEGIN(2, 0);
14778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14779 IEM_MC_LOCAL(uint16_t, u16Fcw);
14780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14782 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14783 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14784 IEM_MC_FETCH_FCW(u16Fcw);
14785 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
14786 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
14787 IEM_MC_END();
14788 return VINF_SUCCESS;
14789}
14790
14791
14792/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
14793FNIEMOP_DEF(iemOp_fnop)
14794{
14795 IEMOP_MNEMONIC(fnop, "fnop");
14796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14797
14798 IEM_MC_BEGIN(0, 0);
14799 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14800 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14801 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14802 /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
14803 * an Intel optimization. Investigate. */
14804 IEM_MC_UPDATE_FPU_OPCODE_IP();
14805 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
14806 IEM_MC_END();
14807 return VINF_SUCCESS;
14808}
14809
14810
14811/** Opcode 0xd9 11/0 stN */
14812FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
14813{
14814 IEMOP_MNEMONIC(fld_stN, "fld stN");
14815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14816
14817 /** @todo Testcase: Check whether this raises \#MF. Intel's docs indicate it
14818 * does not, while AMD's indicate it does. */
14819 IEM_MC_BEGIN(0, 2);
14820 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14821 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14822 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14823 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14824
14825 IEM_MC_PREPARE_FPU_USAGE();
14826 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
14827 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14828 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14829 IEM_MC_ELSE()
14830 IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
14831 IEM_MC_ENDIF();
14832
14833 IEM_MC_ADVANCE_RIP();
14834 IEM_MC_END();
14835
14836 return VINF_SUCCESS;
14837}
14838
14839
14840/** Opcode 0xd9 11/3 stN */
14841FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
14842{
14843 IEMOP_MNEMONIC(fxch_stN, "fxch stN");
14844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14845
14846 /** @todo Testcase: Check whether this raises \#MF. Intel's docs indicate it
14847 * does not, while AMD's indicate it does. */
14848 IEM_MC_BEGIN(1, 3);
14849 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
14850 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
14851 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14852 IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
14853 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14854 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14855
14856 IEM_MC_PREPARE_FPU_USAGE();
14857 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14858 IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
14859 IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
14860 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14861 IEM_MC_ELSE()
14862 IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
14863 IEM_MC_ENDIF();
14864
14865 IEM_MC_ADVANCE_RIP();
14866 IEM_MC_END();
14867
14868 return VINF_SUCCESS;
14869}
14870
14871
14872/** Opcode 0xd9 11/4, 0xdd 11/2. */
14873FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
14874{
14875 IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
14876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14877
14878 /* fstp st0, st0 is frequently used as a portable alternative to 'ffreep st0'. */
14879 uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
14880 if (!iDstReg)
14881 {
14882 IEM_MC_BEGIN(0, 1);
14883 IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
14884 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14885 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14886
14887 IEM_MC_PREPARE_FPU_USAGE();
14888 IEM_MC_IF_FPUREG_NOT_EMPTY(0)
14889 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14890 IEM_MC_ELSE()
14891 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
14892 IEM_MC_ENDIF();
14893
14894 IEM_MC_ADVANCE_RIP();
14895 IEM_MC_END();
14896 }
14897 else
14898 {
14899 IEM_MC_BEGIN(0, 2);
14900 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14901 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14902 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14903 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14904
14905 IEM_MC_PREPARE_FPU_USAGE();
14906 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14907 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14908 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
14909 IEM_MC_ELSE()
14910 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
14911 IEM_MC_ENDIF();
14912
14913 IEM_MC_ADVANCE_RIP();
14914 IEM_MC_END();
14915 }
14916 return VINF_SUCCESS;
14917}
14918
14919
14920/**
14921 * Common worker for FPU instructions working on ST0 and replaces it with the
14922 * result, i.e. unary operators.
14923 *
14924 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14925 */
14926FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
14927{
14928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14929
14930 IEM_MC_BEGIN(2, 1);
14931 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14932 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14933 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14934
14935 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14936 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14937 IEM_MC_PREPARE_FPU_USAGE();
14938 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14939 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
14940 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14941 IEM_MC_ELSE()
14942 IEM_MC_FPU_STACK_UNDERFLOW(0);
14943 IEM_MC_ENDIF();
14944 IEM_MC_ADVANCE_RIP();
14945
14946 IEM_MC_END();
14947 return VINF_SUCCESS;
14948}
14949
14950
14951/** Opcode 0xd9 0xe0. */
14952FNIEMOP_DEF(iemOp_fchs)
14953{
14954 IEMOP_MNEMONIC(fchs_st0, "fchs st0");
14955 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
14956}
14957
14958
14959/** Opcode 0xd9 0xe1. */
14960FNIEMOP_DEF(iemOp_fabs)
14961{
14962 IEMOP_MNEMONIC(fabs_st0, "fabs st0");
14963 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
14964}
14965
14966
14967/**
14968 * Common worker for FPU instructions working on ST0 and only returns FSW.
14969 *
14970 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14971 */
14972FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
14973{
14974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14975
14976 IEM_MC_BEGIN(2, 1);
14977 IEM_MC_LOCAL(uint16_t, u16Fsw);
14978 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14979 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14980
14981 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14982 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14983 IEM_MC_PREPARE_FPU_USAGE();
14984 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14985 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
14986 IEM_MC_UPDATE_FSW(u16Fsw);
14987 IEM_MC_ELSE()
14988 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14989 IEM_MC_ENDIF();
14990 IEM_MC_ADVANCE_RIP();
14991
14992 IEM_MC_END();
14993 return VINF_SUCCESS;
14994}
14995
14996
14997/** Opcode 0xd9 0xe4. */
14998FNIEMOP_DEF(iemOp_ftst)
14999{
15000 IEMOP_MNEMONIC(ftst_st0, "ftst st0");
15001 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
15002}
15003
15004
15005/** Opcode 0xd9 0xe5. */
15006FNIEMOP_DEF(iemOp_fxam)
15007{
15008 IEMOP_MNEMONIC(fxam_st0, "fxam st0");
15009 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
15010}
15011
15012
15013/**
15014 * Common worker for FPU instructions pushing a constant onto the FPU stack.
15015 *
15016 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15017 */
15018FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
15019{
15020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15021
15022 IEM_MC_BEGIN(1, 1);
15023 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15024 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15025
15026 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15027 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15028 IEM_MC_PREPARE_FPU_USAGE();
15029 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15030 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
15031 IEM_MC_PUSH_FPU_RESULT(FpuRes);
15032 IEM_MC_ELSE()
15033 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
15034 IEM_MC_ENDIF();
15035 IEM_MC_ADVANCE_RIP();
15036
15037 IEM_MC_END();
15038 return VINF_SUCCESS;
15039}
15040
15041
15042/** Opcode 0xd9 0xe8. */
15043FNIEMOP_DEF(iemOp_fld1)
15044{
15045 IEMOP_MNEMONIC(fld1, "fld1");
15046 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
15047}
15048
15049
15050/** Opcode 0xd9 0xe9. */
15051FNIEMOP_DEF(iemOp_fldl2t)
15052{
15053 IEMOP_MNEMONIC(fldl2t, "fldl2t");
15054 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
15055}
15056
15057
15058/** Opcode 0xd9 0xea. */
15059FNIEMOP_DEF(iemOp_fldl2e)
15060{
15061 IEMOP_MNEMONIC(fldl2e, "fldl2e");
15062 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
15063}
15064
15065/** Opcode 0xd9 0xeb. */
15066FNIEMOP_DEF(iemOp_fldpi)
15067{
15068 IEMOP_MNEMONIC(fldpi, "fldpi");
15069 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
15070}
15071
15072
15073/** Opcode 0xd9 0xec. */
15074FNIEMOP_DEF(iemOp_fldlg2)
15075{
15076 IEMOP_MNEMONIC(fldlg2, "fldlg2");
15077 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
15078}
15079
15080/** Opcode 0xd9 0xed. */
15081FNIEMOP_DEF(iemOp_fldln2)
15082{
15083 IEMOP_MNEMONIC(fldln2, "fldln2");
15084 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
15085}
15086
15087
15088/** Opcode 0xd9 0xee. */
15089FNIEMOP_DEF(iemOp_fldz)
15090{
15091 IEMOP_MNEMONIC(fldz, "fldz");
15092 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
15093}
15094
15095
15096/** Opcode 0xd9 0xf0. */
15097FNIEMOP_DEF(iemOp_f2xm1)
15098{
15099 IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
15100 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
15101}
15102
15103
15104/**
15105 * Common worker for FPU instructions working on STn and ST0, storing the result
15106 * in STn, and popping the stack unless IE, DE or ZE was raised.
15107 *
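 * @param bRm The ModRM byte; the R/M field gives STn.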
15108 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15109 */
15110FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15111{
15112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15113
15114 IEM_MC_BEGIN(3, 1);
15115 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15116 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15117 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15118 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15119
15120 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15121 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15122
15123 IEM_MC_PREPARE_FPU_USAGE();
15124 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15125 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15126 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
15127 IEM_MC_ELSE()
15128 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
15129 IEM_MC_ENDIF();
15130 IEM_MC_ADVANCE_RIP();
15131
15132 IEM_MC_END();
15133 return VINF_SUCCESS;
15134}
15135
15136
15137/** Opcode 0xd9 0xf1. */
15138FNIEMOP_DEF(iemOp_fyl2x)
15139{
15140 IEMOP_MNEMONIC(fyl2x_st1_st0, "fyl2x st1,st0");
15141 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
15142}
15143
15144
15145/**
15146 * Common worker for FPU instructions working on ST0 and having two outputs, one
15147 * replacing ST0 and one pushed onto the stack.
15148 *
15149 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15150 */
15151FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
15152{
15153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15154
15155 IEM_MC_BEGIN(2, 1);
15156 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
15157 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
15158 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
15159
15160 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15161 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15162 IEM_MC_PREPARE_FPU_USAGE();
15163 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15164 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
15165 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
15166 IEM_MC_ELSE()
15167 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
15168 IEM_MC_ENDIF();
15169 IEM_MC_ADVANCE_RIP();
15170
15171 IEM_MC_END();
15172 return VINF_SUCCESS;
15173}
15174
15175
15176/** Opcode 0xd9 0xf2. */
15177FNIEMOP_DEF(iemOp_fptan)
15178{
15179 IEMOP_MNEMONIC(fptan_st0, "fptan st0");
15180 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
15181}
15182
15183
15184/** Opcode 0xd9 0xf3. */
15185FNIEMOP_DEF(iemOp_fpatan)
15186{
15187 IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
15188 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
15189}
15190
15191
15192/** Opcode 0xd9 0xf4. */
15193FNIEMOP_DEF(iemOp_fxtract)
15194{
15195 IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
15196 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
15197}
15198
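#if 0 /* Illustrative sketch, compiled out: what FXTRACT leaves on the stack,
       * approximated with frexp().  frexp() returns a mantissa in [0.5, 1)
       * while FXTRACT's significand lies in [1, 2), so the exponent is off
       * by one; the doubling below compensates. */
#include <math.h>
#include <stdio.h>

int main(void)
{
    int    iExp;
    double const rdMant = frexp(48.0, &iExp);  /* 48 = 0.75 * 2^6 */
    /* FXTRACT-style result: st0 = significand, st1 = unbiased exponent. */
    printf("sig=%g exp=%d\n", rdMant * 2.0, iExp - 1); /* 1.5, 5 */
    return 0;
}
#endif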
15199
15200/** Opcode 0xd9 0xf5. */
15201FNIEMOP_DEF(iemOp_fprem1)
15202{
15203 IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
15204 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
15205}
15206
15207
15208/** Opcode 0xd9 0xf6. */
15209FNIEMOP_DEF(iemOp_fdecstp)
15210{
15211 IEMOP_MNEMONIC(fdecstp, "fdecstp");
15212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15213 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
15214 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
15215 * FINCSTP and FDECSTP. */
15216
15217 IEM_MC_BEGIN(0,0);
15218
15219 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15220 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15221
15222 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15223 IEM_MC_FPU_STACK_DEC_TOP();
15224 IEM_MC_UPDATE_FSW_CONST(0);
15225
15226 IEM_MC_ADVANCE_RIP();
15227 IEM_MC_END();
15228 return VINF_SUCCESS;
15229}
15230
15231
15232/** Opcode 0xd9 0xf7. */
15233FNIEMOP_DEF(iemOp_fincstp)
15234{
15235 IEMOP_MNEMONIC(fincstp, "fincstp");
15236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15237 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
15238 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
15239 * FINCSTP and FDECSTP. */
15240
15241 IEM_MC_BEGIN(0,0);
15242
15243 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15244 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15245
15246 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15247 IEM_MC_FPU_STACK_INC_TOP();
15248 IEM_MC_UPDATE_FSW_CONST(0);
15249
15250 IEM_MC_ADVANCE_RIP();
15251 IEM_MC_END();
15252 return VINF_SUCCESS;
15253}
15254
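#if 0 /* Illustrative sketch, compiled out: the status-word update performed
       * by IEM_MC_FPU_STACK_DEC_TOP/INC_TOP above.  The 3-bit TOP field in
       * FSW bits 13:11 moves modulo 8 and nothing else changes, which is
       * why FDECSTP/FINCSTP leave registers and tags alone.  The SKETCH
       * names are hypothetical. */
#include <stdint.h>
#include <stdio.h>

#define SKETCH_FSW_TOP_SHIFT 11
#define SKETCH_FSW_TOP_MASK  (UINT16_C(7) << SKETCH_FSW_TOP_SHIFT)

static uint16_t sketchDecTop(uint16_t fsw)
{
    unsigned const uTop = ((fsw >> SKETCH_FSW_TOP_SHIFT) - 1) & 7;
    return (uint16_t)((fsw & ~SKETCH_FSW_TOP_MASK) | (uTop << SKETCH_FSW_TOP_SHIFT));
}

int main(void)
{
    printf("%#06x\n", sketchDecTop(0)); /* TOP 0 wraps to 7 -> 0x3800. */
    return 0;
}
#endif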
15255
15256/** Opcode 0xd9 0xf8. */
15257FNIEMOP_DEF(iemOp_fprem)
15258{
15259 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
15260 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
15261}
15262
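#if 0 /* Illustrative sketch, compiled out: the difference between the two
       * partial-remainder instructions above.  FPREM truncates the quotient
       * (like fmod), FPREM1 rounds it to nearest-even as IEEE 754
       * remainder() does, so the results can differ by one divisor. */
#include <math.h>
#include <stdio.h>

int main(void)
{
    printf("fprem-like:  %g\n", fmod(5.0, 3.0));      /* quotient 1 ->  2.0 */
    printf("fprem1-like: %g\n", remainder(5.0, 3.0)); /* quotient 2 -> -1.0 */
    return 0;
}
#endif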
15263
15264/** Opcode 0xd9 0xf9. */
15265FNIEMOP_DEF(iemOp_fyl2xp1)
15266{
15267 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
15268 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
15269}
15270
15271
15272/** Opcode 0xd9 0xfa. */
15273FNIEMOP_DEF(iemOp_fsqrt)
15274{
15275 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
15276 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
15277}
15278
15279
15280/** Opcode 0xd9 0xfb. */
15281FNIEMOP_DEF(iemOp_fsincos)
15282{
15283 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
15284 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
15285}
15286
15287
15288/** Opcode 0xd9 0xfc. */
15289FNIEMOP_DEF(iemOp_frndint)
15290{
15291 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
15292 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
15293}
15294
15295
15296/** Opcode 0xd9 0xfd. */
15297FNIEMOP_DEF(iemOp_fscale)
15298{
15299 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
15300 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
15301}
15302
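#if 0 /* Illustrative sketch, compiled out: FSCALE multiplies st0 by
       * 2^trunc(st1); with the scale already truncated to an integer,
       * ldexp() is the libm analogue. */
#include <math.h>
#include <stdio.h>

int main(void)
{
    double const rdSt0 = 1.5, rdSt1 = 3.7;
    printf("%g\n", ldexp(rdSt0, (int)trunc(rdSt1))); /* 1.5 * 2^3 = 12 */
    return 0;
}
#endif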
15303
15304/** Opcode 0xd9 0xfe. */
15305FNIEMOP_DEF(iemOp_fsin)
15306{
15307 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
15308 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
15309}
15310
15311
15312/** Opcode 0xd9 0xff. */
15313FNIEMOP_DEF(iemOp_fcos)
15314{
15315 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
15316 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
15317}
15318
15319
15320/** Used by iemOp_EscF1. */
15321IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
15322{
15323 /* 0xe0 */ iemOp_fchs,
15324 /* 0xe1 */ iemOp_fabs,
15325 /* 0xe2 */ iemOp_Invalid,
15326 /* 0xe3 */ iemOp_Invalid,
15327 /* 0xe4 */ iemOp_ftst,
15328 /* 0xe5 */ iemOp_fxam,
15329 /* 0xe6 */ iemOp_Invalid,
15330 /* 0xe7 */ iemOp_Invalid,
15331 /* 0xe8 */ iemOp_fld1,
15332 /* 0xe9 */ iemOp_fldl2t,
15333 /* 0xea */ iemOp_fldl2e,
15334 /* 0xeb */ iemOp_fldpi,
15335 /* 0xec */ iemOp_fldlg2,
15336 /* 0xed */ iemOp_fldln2,
15337 /* 0xee */ iemOp_fldz,
15338 /* 0xef */ iemOp_Invalid,
15339 /* 0xf0 */ iemOp_f2xm1,
15340 /* 0xf1 */ iemOp_fyl2x,
15341 /* 0xf2 */ iemOp_fptan,
15342 /* 0xf3 */ iemOp_fpatan,
15343 /* 0xf4 */ iemOp_fxtract,
15344 /* 0xf5 */ iemOp_fprem1,
15345 /* 0xf6 */ iemOp_fdecstp,
15346 /* 0xf7 */ iemOp_fincstp,
15347 /* 0xf8 */ iemOp_fprem,
15348 /* 0xf9 */ iemOp_fyl2xp1,
15349 /* 0xfa */ iemOp_fsqrt,
15350 /* 0xfb */ iemOp_fsincos,
15351 /* 0xfc */ iemOp_frndint,
15352 /* 0xfd */ iemOp_fscale,
15353 /* 0xfe */ iemOp_fsin,
15354 /* 0xff */ iemOp_fcos
15355};
15356
15357
15358/** Opcode 0xd9. */
15359FNIEMOP_DEF(iemOp_EscF1)
15360{
15361 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15362 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
15363
15364 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15365 {
15366 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15367 {
15368 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
15369 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
15370 case 2:
15371 if (bRm == 0xd0)
15372 return FNIEMOP_CALL(iemOp_fnop);
15373 return IEMOP_RAISE_INVALID_OPCODE();
15374 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
15375 case 4:
15376 case 5:
15377 case 6:
15378 case 7:
15379 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
15380 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
15381 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15382 }
15383 }
15384 else
15385 {
15386 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15387 {
15388 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
15389 case 1: return IEMOP_RAISE_INVALID_OPCODE();
15390 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
15391 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
15392 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
15393 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
15394 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
15395 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
15396 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15397 }
15398 }
15399}
15400
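#if 0 /* Illustrative sketch, compiled out: the mod/reg/rm split the escape
       * opcode dispatchers above perform with the X86_MODRM_ masks and
       * shifts.  The sketchDecodeModRM helper and the program around it are
       * hypothetical, not VirtualBox code. */
#include <stdint.h>
#include <stdio.h>

static void sketchDecodeModRM(uint8_t bRm)
{
    unsigned const uMod = bRm >> 6;        /* X86_MODRM_MOD_SHIFT is 6.     */
    unsigned const uReg = (bRm >> 3) & 7;  /* The /0../7 sub-opcode field.  */
    unsigned const uRm  = bRm & 7;         /* ST(i) for the register forms. */
    if (uMod == 3)
        printf("%#04x: register form /%u, st(%u)\n", bRm, uReg, uRm);
    else
        printf("%#04x: memory form /%u (mod=%u)\n", bRm, uReg, uMod);
}

int main(void)
{
    sketchDecodeModRM(0xc1); /* 0xd9 0xc1: FLD ST(1).            */
    sketchDecodeModRM(0xe9); /* 0xd9 0xe9: FLDL2T (table above). */
    sketchDecodeModRM(0x05); /* mod=0, /0: FLD m32r.             */
    return 0;
}
#endif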
15401
15402/** Opcode 0xda 11/0. */
15403FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
15404{
15405 IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
15406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15407
15408 IEM_MC_BEGIN(0, 1);
15409 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15410
15411 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15412 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15413
15414 IEM_MC_PREPARE_FPU_USAGE();
15415 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15416 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
15417 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15418 IEM_MC_ENDIF();
15419 IEM_MC_UPDATE_FPU_OPCODE_IP();
15420 IEM_MC_ELSE()
15421 IEM_MC_FPU_STACK_UNDERFLOW(0);
15422 IEM_MC_ENDIF();
15423 IEM_MC_ADVANCE_RIP();
15424
15425 IEM_MC_END();
15426 return VINF_SUCCESS;
15427}
15428
15429
15430/** Opcode 0xda 11/1. */
15431FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
15432{
15433 IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
15434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15435
15436 IEM_MC_BEGIN(0, 1);
15437 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15438
15439 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15440 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15441
15442 IEM_MC_PREPARE_FPU_USAGE();
15443 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15444 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
15445 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15446 IEM_MC_ENDIF();
15447 IEM_MC_UPDATE_FPU_OPCODE_IP();
15448 IEM_MC_ELSE()
15449 IEM_MC_FPU_STACK_UNDERFLOW(0);
15450 IEM_MC_ENDIF();
15451 IEM_MC_ADVANCE_RIP();
15452
15453 IEM_MC_END();
15454 return VINF_SUCCESS;
15455}
15456
15457
15458/** Opcode 0xda 11/2. */
15459FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
15460{
15461 IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
15462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15463
15464 IEM_MC_BEGIN(0, 1);
15465 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15466
15467 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15468 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15469
15470 IEM_MC_PREPARE_FPU_USAGE();
15471 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15472 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
15473 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15474 IEM_MC_ENDIF();
15475 IEM_MC_UPDATE_FPU_OPCODE_IP();
15476 IEM_MC_ELSE()
15477 IEM_MC_FPU_STACK_UNDERFLOW(0);
15478 IEM_MC_ENDIF();
15479 IEM_MC_ADVANCE_RIP();
15480
15481 IEM_MC_END();
15482 return VINF_SUCCESS;
15483}
15484
15485
15486/** Opcode 0xda 11/3. */
15487FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
15488{
15489 IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
15490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15491
15492 IEM_MC_BEGIN(0, 1);
15493 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15494
15495 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15496 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15497
15498 IEM_MC_PREPARE_FPU_USAGE();
15499 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15500 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
15501 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15502 IEM_MC_ENDIF();
15503 IEM_MC_UPDATE_FPU_OPCODE_IP();
15504 IEM_MC_ELSE()
15505 IEM_MC_FPU_STACK_UNDERFLOW(0);
15506 IEM_MC_ENDIF();
15507 IEM_MC_ADVANCE_RIP();
15508
15509 IEM_MC_END();
15510 return VINF_SUCCESS;
15511}
15512
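#if 0 /* Illustrative sketch, compiled out: the EFLAGS tests behind the four
       * 0xda register forms above -- FCMOVB moves on CF, FCMOVE on ZF,
       * FCMOVBE on CF|ZF, FCMOVU on PF (the unordered flag as FCOMI/FUCOMI
       * set it); the 0xdb forms test the negations.  All sketch names are
       * hypothetical. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define SKETCH_EFL_CF UINT32_C(0x01)
#define SKETCH_EFL_PF UINT32_C(0x04)
#define SKETCH_EFL_ZF UINT32_C(0x40)

static bool sketchFCmovTaken(unsigned iCond /* reg field, 0..3 */, uint32_t fEfl)
{
    switch (iCond)
    {
        case 0:  return (fEfl & SKETCH_EFL_CF) != 0;                   /* FCMOVB  */
        case 1:  return (fEfl & SKETCH_EFL_ZF) != 0;                   /* FCMOVE  */
        case 2:  return (fEfl & (SKETCH_EFL_CF | SKETCH_EFL_ZF)) != 0; /* FCMOVBE */
        default: return (fEfl & SKETCH_EFL_PF) != 0;                   /* FCMOVU  */
    }
}

int main(void)
{
    printf("%d %d\n", sketchFCmovTaken(0, SKETCH_EFL_CF), sketchFCmovTaken(3, 0)); /* 1 0 */
    return 0;
}
#endif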
15513
15514/**
15515 * Common worker for FPU instructions working on ST0 and ST1, only affecting
15516 * flags, and popping the stack twice when done.
15517 *
15518 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15519 */
15520FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
15521{
15522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15523
15524 IEM_MC_BEGIN(3, 1);
15525 IEM_MC_LOCAL(uint16_t, u16Fsw);
15526 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15527 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15528 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15529
15530 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15531 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15532
15533 IEM_MC_PREPARE_FPU_USAGE();
15534 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
15535 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
15536 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
15537 IEM_MC_ELSE()
15538 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
15539 IEM_MC_ENDIF();
15540 IEM_MC_ADVANCE_RIP();
15541
15542 IEM_MC_END();
15543 return VINF_SUCCESS;
15544}
15545
15546
15547/** Opcode 0xda 0xe9. */
15548FNIEMOP_DEF(iemOp_fucompp)
15549{
15550 IEMOP_MNEMONIC(fucompp_st0_st1, "fucompp st0,st1");
15551 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
15552}
15553
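#if 0 /* Illustrative sketch, compiled out: how a FUCOM-style comparison
       * reports its verdict through the FSW condition codes that the worker
       * above commits -- C3/C2/C0 all clear for "greater", C0 for "less",
       * C3 for "equal", all three for unordered.  The sketch names are
       * hypothetical. */
#include <math.h>
#include <stdint.h>
#include <stdio.h>

#define SKETCH_FSW_C0 UINT16_C(0x0100) /* bit  8 */
#define SKETCH_FSW_C2 UINT16_C(0x0400) /* bit 10 */
#define SKETCH_FSW_C3 UINT16_C(0x4000) /* bit 14 */

static uint16_t sketchFComFsw(double rd1, double rd2)
{
    if (rd1 > rd2)  return 0;
    if (rd1 < rd2)  return SKETCH_FSW_C0;
    if (rd1 == rd2) return SKETCH_FSW_C3;
    return SKETCH_FSW_C3 | SKETCH_FSW_C2 | SKETCH_FSW_C0; /* NaN involved. */
}

int main(void)
{
    printf("%#x %#x\n", sketchFComFsw(1.0, 2.0), sketchFComFsw(NAN, 1.0));
    return 0;
}
#endif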
15554
15555/**
15556 * Common worker for FPU instructions working on ST0 and an m32i, and storing
15557 * the result in ST0.
15558 *
15559 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15560 */
15561FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
15562{
15563 IEM_MC_BEGIN(3, 3);
15564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15565 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15566 IEM_MC_LOCAL(int32_t, i32Val2);
15567 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15568 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15569 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15570
15571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15573
15574 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15575 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15576 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15577
15578 IEM_MC_PREPARE_FPU_USAGE();
15579 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15580 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
15581 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15582 IEM_MC_ELSE()
15583 IEM_MC_FPU_STACK_UNDERFLOW(0);
15584 IEM_MC_ENDIF();
15585 IEM_MC_ADVANCE_RIP();
15586
15587 IEM_MC_END();
15588 return VINF_SUCCESS;
15589}
15590
15591
15592/** Opcode 0xda !11/0. */
15593FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
15594{
15595 IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
15596 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
15597}
15598
15599
15600/** Opcode 0xda !11/1. */
15601FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
15602{
15603 IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
15604 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
15605}
15606
15607
15608/** Opcode 0xda !11/2. */
15609FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
15610{
15611 IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
15612
15613 IEM_MC_BEGIN(3, 3);
15614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15615 IEM_MC_LOCAL(uint16_t, u16Fsw);
15616 IEM_MC_LOCAL(int32_t, i32Val2);
15617 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15618 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15619 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15620
15621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15623
15624 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15625 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15626 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15627
15628 IEM_MC_PREPARE_FPU_USAGE();
15629 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15630 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
15631 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15632 IEM_MC_ELSE()
15633 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15634 IEM_MC_ENDIF();
15635 IEM_MC_ADVANCE_RIP();
15636
15637 IEM_MC_END();
15638 return VINF_SUCCESS;
15639}
15640
15641
15642/** Opcode 0xda !11/3. */
15643FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
15644{
15645 IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
15646
15647 IEM_MC_BEGIN(3, 3);
15648 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15649 IEM_MC_LOCAL(uint16_t, u16Fsw);
15650 IEM_MC_LOCAL(int32_t, i32Val2);
15651 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15652 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15653 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15654
15655 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15657
15658 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15659 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15660 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15661
15662 IEM_MC_PREPARE_FPU_USAGE();
15663 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15664 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
15665 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15666 IEM_MC_ELSE()
15667 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15668 IEM_MC_ENDIF();
15669 IEM_MC_ADVANCE_RIP();
15670
15671 IEM_MC_END();
15672 return VINF_SUCCESS;
15673}
15674
15675
15676/** Opcode 0xda !11/4. */
15677FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
15678{
15679 IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
15680 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
15681}
15682
15683
15684/** Opcode 0xda !11/5. */
15685FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
15686{
15687 IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
15688 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
15689}
15690
15691
15692/** Opcode 0xda !11/6. */
15693FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
15694{
15695 IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
15696 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
15697}
15698
15699
15700/** Opcode 0xda !11/7. */
15701FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
15702{
15703 IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
15704 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
15705}
15706
15707
15708/** Opcode 0xda. */
15709FNIEMOP_DEF(iemOp_EscF2)
15710{
15711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15712 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
15713 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15714 {
15715 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15716 {
15717 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
15718 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
15719 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
15720 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
15721 case 4: return IEMOP_RAISE_INVALID_OPCODE();
15722 case 5:
15723 if (bRm == 0xe9)
15724 return FNIEMOP_CALL(iemOp_fucompp);
15725 return IEMOP_RAISE_INVALID_OPCODE();
15726 case 6: return IEMOP_RAISE_INVALID_OPCODE();
15727 case 7: return IEMOP_RAISE_INVALID_OPCODE();
15728 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15729 }
15730 }
15731 else
15732 {
15733 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15734 {
15735 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
15736 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
15737 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
15738 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
15739 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
15740 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
15741 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
15742 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
15743 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15744 }
15745 }
15746}
15747
15748
15749/** Opcode 0xdb !11/0. */
15750FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
15751{
15752 IEMOP_MNEMONIC(fild_m32i, "fild m32i");
15753
15754 IEM_MC_BEGIN(2, 3);
15755 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15756 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15757 IEM_MC_LOCAL(int32_t, i32Val);
15758 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15759 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
15760
15761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15763
15764 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15765 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15766 IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15767
15768 IEM_MC_PREPARE_FPU_USAGE();
15769 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15770 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
15771 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15772 IEM_MC_ELSE()
15773 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15774 IEM_MC_ENDIF();
15775 IEM_MC_ADVANCE_RIP();
15776
15777 IEM_MC_END();
15778 return VINF_SUCCESS;
15779}
15780
15781
15782/** Opcode 0xdb !11/1. */
15783FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
15784{
15785 IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
15786 IEM_MC_BEGIN(3, 2);
15787 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15788 IEM_MC_LOCAL(uint16_t, u16Fsw);
15789 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15790 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15791 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15792
15793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15795 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15796 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15797
15798 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15799 IEM_MC_PREPARE_FPU_USAGE();
15800 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15801 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15802 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15803 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15804 IEM_MC_ELSE()
15805 IEM_MC_IF_FCW_IM()
15806 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15807 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15808 IEM_MC_ENDIF();
15809 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15810 IEM_MC_ENDIF();
15811 IEM_MC_ADVANCE_RIP();
15812
15813 IEM_MC_END();
15814 return VINF_SUCCESS;
15815}
15816
15817
15818/** Opcode 0xdb !11/2. */
15819FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
15820{
15821 IEMOP_MNEMONIC(fist_m32i, "fist m32i");
15822 IEM_MC_BEGIN(3, 2);
15823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15824 IEM_MC_LOCAL(uint16_t, u16Fsw);
15825 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15826 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15827 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15828
15829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15831 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15832 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15833
15834 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15835 IEM_MC_PREPARE_FPU_USAGE();
15836 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15837 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15838 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15839 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15840 IEM_MC_ELSE()
15841 IEM_MC_IF_FCW_IM()
15842 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15843 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15844 IEM_MC_ENDIF();
15845 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15846 IEM_MC_ENDIF();
15847 IEM_MC_ADVANCE_RIP();
15848
15849 IEM_MC_END();
15850 return VINF_SUCCESS;
15851}
15852
15853
15854/** Opcode 0xdb !11/3. */
15855FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
15856{
15857 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
15858 IEM_MC_BEGIN(3, 2);
15859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15860 IEM_MC_LOCAL(uint16_t, u16Fsw);
15861 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15862 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15863 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15864
15865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15867 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15868 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15869
15870 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15871 IEM_MC_PREPARE_FPU_USAGE();
15872 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15873 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15874 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15875 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15876 IEM_MC_ELSE()
15877 IEM_MC_IF_FCW_IM()
15878 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15879 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15880 IEM_MC_ENDIF();
15881 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15882 IEM_MC_ENDIF();
15883 IEM_MC_ADVANCE_RIP();
15884
15885 IEM_MC_END();
15886 return VINF_SUCCESS;
15887}
15888
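#if 0 /* Illustrative sketch, compiled out: the three store-to-int flavours
       * above in miniature.  FIST/FISTP convert using the current rounding
       * mode (round-to-nearest-even by default, like rint()), FISTTP always
       * truncates, and an out-of-range or NaN input yields the "integer
       * indefinite" INT32_MIN when the invalid exception is masked (FCW.IM,
       * cf. IEM_MC_IF_FCW_IM above).  sketchFist is hypothetical. */
#include <math.h>
#include <stdint.h>
#include <stdio.h>

static int32_t sketchFist(double rdVal, int fTruncate)
{
    double const rd = fTruncate ? trunc(rdVal) : rint(rdVal);
    if (!(rd >= -2147483648.0 && rd <= 2147483647.0))
        return INT32_MIN; /* Integer indefinite; NaN also lands here. */
    return (int32_t)rd;
}

int main(void)
{
    printf("%d %d %d\n",
           sketchFist(2.5, 0),   /* 2 - half rounds to even. */
           sketchFist(2.5, 1),   /* 2 - truncated.           */
           sketchFist(1e19, 0)); /* INT32_MIN - indefinite.  */
    return 0;
}
#endif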
15889
15890/** Opcode 0xdb !11/5. */
15891FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
15892{
15893 IEMOP_MNEMONIC(fld_m80r, "fld m80r");
15894
15895 IEM_MC_BEGIN(2, 3);
15896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15897 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15898 IEM_MC_LOCAL(RTFLOAT80U, r80Val);
15899 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15900 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
15901
15902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15904
15905 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15906 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15907 IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15908
15909 IEM_MC_PREPARE_FPU_USAGE();
15910 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15911 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
15912 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15913 IEM_MC_ELSE()
15914 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15915 IEM_MC_ENDIF();
15916 IEM_MC_ADVANCE_RIP();
15917
15918 IEM_MC_END();
15919 return VINF_SUCCESS;
15920}
15921
15922
15923/** Opcode 0xdb !11/7. */
15924FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
15925{
15926 IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
15927 IEM_MC_BEGIN(3, 2);
15928 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15929 IEM_MC_LOCAL(uint16_t, u16Fsw);
15930 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15931 IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
15932 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15933
15934 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15936 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15937 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15938
15939 IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15940 IEM_MC_PREPARE_FPU_USAGE();
15941 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15942 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
15943 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
15944 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15945 IEM_MC_ELSE()
15946 IEM_MC_IF_FCW_IM()
15947 IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
15948 IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
15949 IEM_MC_ENDIF();
15950 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15951 IEM_MC_ENDIF();
15952 IEM_MC_ADVANCE_RIP();
15953
15954 IEM_MC_END();
15955 return VINF_SUCCESS;
15956}
15957
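#if 0 /* Illustrative sketch, compiled out: the rough shape of the ten-byte
       * values FLD/FSTP m80r move above -- a 64-bit mantissa with an
       * explicit integer bit, a 15-bit exponent biased by 16383 and a sign
       * bit.  SKETCHFLOAT80U is a hypothetical stand-in for RTFLOAT80U. */
#include <stdint.h>
#include <stdio.h>

typedef struct SKETCHFLOAT80U
{
    uint64_t u64Mantissa; /* Bits 63:0; bit 63 is the explicit integer bit. */
    uint16_t u16SignExp;  /* Bit 15 sign; bits 14:0 biased exponent.        */
} SKETCHFLOAT80U;

int main(void)
{
    /* 1.0 = +1.0 * 2^0: integer bit set, exponent 0 + 16383. */
    SKETCHFLOAT80U const Val = { UINT64_C(0x8000000000000000), 16383 };
    printf("sign=%d biased-exp=%d unbiased=%d\n",
           Val.u16SignExp >> 15, Val.u16SignExp & 0x7fff,
           (Val.u16SignExp & 0x7fff) - 16383);
    return 0;
}
#endif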
15958
15959/** Opcode 0xdb 11/0. */
15960FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
15961{
15962 IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
15963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15964
15965 IEM_MC_BEGIN(0, 1);
15966 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15967
15968 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15969 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15970
15971 IEM_MC_PREPARE_FPU_USAGE();
15972 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15973 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
15974 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15975 IEM_MC_ENDIF();
15976 IEM_MC_UPDATE_FPU_OPCODE_IP();
15977 IEM_MC_ELSE()
15978 IEM_MC_FPU_STACK_UNDERFLOW(0);
15979 IEM_MC_ENDIF();
15980 IEM_MC_ADVANCE_RIP();
15981
15982 IEM_MC_END();
15983 return VINF_SUCCESS;
15984}
15985
15986
15987/** Opcode 0xdb 11/1. */
15988FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
15989{
15990 IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
15991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15992
15993 IEM_MC_BEGIN(0, 1);
15994 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15995
15996 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15997 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15998
15999 IEM_MC_PREPARE_FPU_USAGE();
16000 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16001 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
16002 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16003 IEM_MC_ENDIF();
16004 IEM_MC_UPDATE_FPU_OPCODE_IP();
16005 IEM_MC_ELSE()
16006 IEM_MC_FPU_STACK_UNDERFLOW(0);
16007 IEM_MC_ENDIF();
16008 IEM_MC_ADVANCE_RIP();
16009
16010 IEM_MC_END();
16011 return VINF_SUCCESS;
16012}
16013
16014
16015/** Opcode 0xdb 11/2. */
16016FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
16017{
16018 IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
16019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16020
16021 IEM_MC_BEGIN(0, 1);
16022 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
16023
16024 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16025 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16026
16027 IEM_MC_PREPARE_FPU_USAGE();
16028 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16029 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
16030 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16031 IEM_MC_ENDIF();
16032 IEM_MC_UPDATE_FPU_OPCODE_IP();
16033 IEM_MC_ELSE()
16034 IEM_MC_FPU_STACK_UNDERFLOW(0);
16035 IEM_MC_ENDIF();
16036 IEM_MC_ADVANCE_RIP();
16037
16038 IEM_MC_END();
16039 return VINF_SUCCESS;
16040}
16041
16042
16043/** Opcode 0xdb 11/3. */
16044FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
16045{
16046 IEMOP_MNEMONIC(fcmovnu_st0_stN, "fcmovnu st0,stN");
16047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16048
16049 IEM_MC_BEGIN(0, 1);
16050 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
16051
16052 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16053 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16054
16055 IEM_MC_PREPARE_FPU_USAGE();
16056 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16057 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
16058 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16059 IEM_MC_ENDIF();
16060 IEM_MC_UPDATE_FPU_OPCODE_IP();
16061 IEM_MC_ELSE()
16062 IEM_MC_FPU_STACK_UNDERFLOW(0);
16063 IEM_MC_ENDIF();
16064 IEM_MC_ADVANCE_RIP();
16065
16066 IEM_MC_END();
16067 return VINF_SUCCESS;
16068}
16069
16070
16071/** Opcode 0xdb 0xe0. */
16072FNIEMOP_DEF(iemOp_fneni)
16073{
16074 IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
16075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16076 IEM_MC_BEGIN(0,0);
16077 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16078 IEM_MC_ADVANCE_RIP();
16079 IEM_MC_END();
16080 return VINF_SUCCESS;
16081}
16082
16083
16084/** Opcode 0xdb 0xe1. */
16085FNIEMOP_DEF(iemOp_fndisi)
16086{
16087 IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
16088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16089 IEM_MC_BEGIN(0,0);
16090 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16091 IEM_MC_ADVANCE_RIP();
16092 IEM_MC_END();
16093 return VINF_SUCCESS;
16094}
16095
16096
16097/** Opcode 0xdb 0xe2. */
16098FNIEMOP_DEF(iemOp_fnclex)
16099{
16100 IEMOP_MNEMONIC(fnclex, "fnclex");
16101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16102
16103 IEM_MC_BEGIN(0,0);
16104 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16105 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16106 IEM_MC_CLEAR_FSW_EX();
16107 IEM_MC_ADVANCE_RIP();
16108 IEM_MC_END();
16109 return VINF_SUCCESS;
16110}
16111
16112
16113/** Opcode 0xdb 0xe3. */
16114FNIEMOP_DEF(iemOp_fninit)
16115{
16116 IEMOP_MNEMONIC(fninit, "fninit");
16117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16118 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
16119}
16120
16121
16122/** Opcode 0xdb 0xe4. */
16123FNIEMOP_DEF(iemOp_fnsetpm)
16124{
16125 IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
16126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16127 IEM_MC_BEGIN(0,0);
16128 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16129 IEM_MC_ADVANCE_RIP();
16130 IEM_MC_END();
16131 return VINF_SUCCESS;
16132}
16133
16134
16135/** Opcode 0xdb 0xe5. */
16136FNIEMOP_DEF(iemOp_frstpm)
16137{
16138 IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
16139#if 0 /* #UDs on newer CPUs */
16140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16141 IEM_MC_BEGIN(0,0);
16142 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16143 IEM_MC_ADVANCE_RIP();
16144 IEM_MC_END();
16145 return VINF_SUCCESS;
16146#else
16147 return IEMOP_RAISE_INVALID_OPCODE();
16148#endif
16149}
16150
16151
16152/** Opcode 0xdb 11/5. */
16153FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
16154{
16155 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
16156 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
16157}
16158
16159
16160/** Opcode 0xdb 11/6. */
16161FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
16162{
16163 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
16164 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
16165}
16166
16167
16168/** Opcode 0xdb. */
16169FNIEMOP_DEF(iemOp_EscF3)
16170{
16171 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16172 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
16173 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16174 {
16175 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16176 {
16177 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
16178 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
16179 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
16180 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnu_stN, bRm);
16181 case 4:
16182 switch (bRm)
16183 {
16184 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
16185 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
16186 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
16187 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
16188 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
16189 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
16190 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
16191 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
16192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16193 }
16194 break;
16195 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
16196 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
16197 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16198 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16199 }
16200 }
16201 else
16202 {
16203 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16204 {
16205 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
16206 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i, bRm);
16207 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
16208 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
16209 case 4: return IEMOP_RAISE_INVALID_OPCODE();
16210 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
16211 case 6: return IEMOP_RAISE_INVALID_OPCODE();
16212 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
16213 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16214 }
16215 }
16216}
16217
16218
16219/**
16220 * Common worker for FPU instructions working on STn and ST0, and storing the
16221 * result in STn unless IE, DE or ZE was raised.
16222 *
16223 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16224 */
16225FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
16226{
16227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16228
16229 IEM_MC_BEGIN(3, 1);
16230 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16231 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16232 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16233 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
16234
16235 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16236 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16237
16238 IEM_MC_PREPARE_FPU_USAGE();
16239 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
16240 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
16241 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
16242 IEM_MC_ELSE()
16243 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
16244 IEM_MC_ENDIF();
16245 IEM_MC_ADVANCE_RIP();
16246
16247 IEM_MC_END();
16248 return VINF_SUCCESS;
16249}
16250
16251
16252/** Opcode 0xdc 11/0. */
16253FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
16254{
16255 IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
16256 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
16257}
16258
16259
16260/** Opcode 0xdc 11/1. */
16261FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
16262{
16263 IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
16264 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
16265}
16266
16267
16268/** Opcode 0xdc 11/4. */
16269FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
16270{
16271 IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
16272 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
16273}
16274
16275
16276/** Opcode 0xdc 11/5. */
16277FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
16278{
16279 IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
16280 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
16281}
16282
16283
16284/** Opcode 0xdc 11/6. */
16285FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
16286{
16287 IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
16288 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
16289}
16290
16291
16292/** Opcode 0xdc 11/7. */
16293FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
16294{
16295 IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
16296 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
16297}
16298
16299
16300/**
16301 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
16302 * memory operand, and storing the result in ST0.
16303 *
16304 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16305 */
16306FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnAImpl)
16307{
16308 IEM_MC_BEGIN(3, 3);
16309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16310 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16311 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
16312 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16313 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
16314 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
16315
16316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16318 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16319 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16320
16321 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16322 IEM_MC_PREPARE_FPU_USAGE();
16323 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
16324 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Factor1, pr64Factor2);
16325 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16326 IEM_MC_ELSE()
16327 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16328 IEM_MC_ENDIF();
16329 IEM_MC_ADVANCE_RIP();
16330
16331 IEM_MC_END();
16332 return VINF_SUCCESS;
16333}
16334
16335
16336/** Opcode 0xdc !11/0. */
16337FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
16338{
16339 IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
16340 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
16341}
16342
16343
16344/** Opcode 0xdc !11/1. */
16345FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
16346{
16347 IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
16348 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
16349}
16350
16351
16352/** Opcode 0xdc !11/2. */
16353FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
16354{
16355 IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
16356
16357 IEM_MC_BEGIN(3, 3);
16358 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16359 IEM_MC_LOCAL(uint16_t, u16Fsw);
16360 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
16361 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16362 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16363 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
16364
16365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16367
16368 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16369 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16370 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16371
16372 IEM_MC_PREPARE_FPU_USAGE();
16373 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16374 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
16375 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16376 IEM_MC_ELSE()
16377 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16378 IEM_MC_ENDIF();
16379 IEM_MC_ADVANCE_RIP();
16380
16381 IEM_MC_END();
16382 return VINF_SUCCESS;
16383}
16384
16385
16386/** Opcode 0xdc !11/3. */
16387FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
16388{
16389 IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
16390
16391 IEM_MC_BEGIN(3, 3);
16392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16393 IEM_MC_LOCAL(uint16_t, u16Fsw);
16394 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
16395 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16396 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16397 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
16398
16399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16401
16402 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16403 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16404 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16405
16406 IEM_MC_PREPARE_FPU_USAGE();
16407 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16408 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
16409 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16410 IEM_MC_ELSE()
16411 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16412 IEM_MC_ENDIF();
16413 IEM_MC_ADVANCE_RIP();
16414
16415 IEM_MC_END();
16416 return VINF_SUCCESS;
16417}
16418
16419
16420/** Opcode 0xdc !11/4. */
16421FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
16422{
16423 IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
16424 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
16425}
16426
16427
16428/** Opcode 0xdc !11/5. */
16429FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
16430{
16431 IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
16432 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
16433}
16434
16435
16436/** Opcode 0xdc !11/6. */
16437FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
16438{
16439 IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
16440 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
16441}
16442
16443
16444/** Opcode 0xdc !11/7. */
16445FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
16446{
16447 IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
16448 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
16449}
16450
16451
16452/** Opcode 0xdc. */
16453FNIEMOP_DEF(iemOp_EscF4)
16454{
16455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16456 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
16457 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16458 {
16459 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16460 {
16461 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
16462 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
16463 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
16464 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
16465 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
16466 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
16467 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
16468 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
16469 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16470 }
16471 }
16472 else
16473 {
16474 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16475 {
16476 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
16477 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
16478 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
16479 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
16480 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
16481 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
16482 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
16483 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
16484 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16485 }
16486 }
16487}
16488
16489
16490/** Opcode 0xdd !11/0.
16491 * @sa iemOp_fld_m32r */
16492FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
16493{
16494 IEMOP_MNEMONIC(fld_m64r, "fld m64r");
16495
16496 IEM_MC_BEGIN(2, 3);
16497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16498 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16499 IEM_MC_LOCAL(RTFLOAT64U, r64Val);
16500 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16501 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);
16502
16503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16505 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16506 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16507
16508 IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16509 IEM_MC_PREPARE_FPU_USAGE();
16510 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16511 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
16512 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16513 IEM_MC_ELSE()
16514 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16515 IEM_MC_ENDIF();
16516 IEM_MC_ADVANCE_RIP();
16517
16518 IEM_MC_END();
16519 return VINF_SUCCESS;
16520}
16521
16522
16523/** Opcode 0xdd !11/1. */
16524FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
16525{
16526 IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
16527 IEM_MC_BEGIN(3, 2);
16528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16529 IEM_MC_LOCAL(uint16_t, u16Fsw);
16530 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16531 IEM_MC_ARG(int64_t *, pi64Dst, 1);
16532 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16533
16534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16536 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16537 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16538
16539 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16540 IEM_MC_PREPARE_FPU_USAGE();
16541 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16542 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
16543 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16544 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16545 IEM_MC_ELSE()
16546 IEM_MC_IF_FCW_IM()
16547 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
16548 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
16549 IEM_MC_ENDIF();
16550 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16551 IEM_MC_ENDIF();
16552 IEM_MC_ADVANCE_RIP();
16553
16554 IEM_MC_END();
16555 return VINF_SUCCESS;
16556}
16557
16558
16559/** Opcode 0xdd !11/2. */
16560FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
16561{
16562 IEMOP_MNEMONIC(fst_m64r, "fst m64r");
16563 IEM_MC_BEGIN(3, 2);
16564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16565 IEM_MC_LOCAL(uint16_t, u16Fsw);
16566 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16567 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
16568 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16569
16570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16572 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16573 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16574
16575 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16576 IEM_MC_PREPARE_FPU_USAGE();
16577 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16578 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
16579 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16580 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16581 IEM_MC_ELSE()
16582 IEM_MC_IF_FCW_IM()
16583 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
16584 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
16585 IEM_MC_ENDIF();
16586 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16587 IEM_MC_ENDIF();
16588 IEM_MC_ADVANCE_RIP();
16589
16590 IEM_MC_END();
16591 return VINF_SUCCESS;
16592}
16593
16594
16597/** Opcode 0xdd !11/3. */
16598FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
16599{
16600 IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
16601 IEM_MC_BEGIN(3, 2);
16602 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16603 IEM_MC_LOCAL(uint16_t, u16Fsw);
16604 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16605 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
16606 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16607
16608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16610 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16611 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16612
16613 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16614 IEM_MC_PREPARE_FPU_USAGE();
16615 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16616 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
16617 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16618 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16619 IEM_MC_ELSE()
16620 IEM_MC_IF_FCW_IM()
16621 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
16622 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
16623 IEM_MC_ENDIF();
16624 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16625 IEM_MC_ENDIF();
16626 IEM_MC_ADVANCE_RIP();
16627
16628 IEM_MC_END();
16629 return VINF_SUCCESS;
16630}
16631
16632
16633/** Opcode 0xdd !11/4. */
16634FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
16635{
16636 IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
16637 IEM_MC_BEGIN(3, 0);
16638 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
16639 IEM_MC_ARG(uint8_t, iEffSeg, 1);
16640 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
16641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16643 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16644 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16645 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
16646 IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
16647 IEM_MC_END();
16648 return VINF_SUCCESS;
16649}
16650
16651
16652/** Opcode 0xdd !11/6. */
16653FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
16654{
16655 IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
16656 IEM_MC_BEGIN(3, 0);
16657 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
16658 IEM_MC_ARG(uint8_t, iEffSeg, 1);
16659 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
16660 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16662 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16663 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16664 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
16665 IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
16666 IEM_MC_END();
16667 return VINF_SUCCESS;
16668}
16669
16670
16671/** Opcode 0xdd !11/7. */
16672FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
16673{
16674 IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
16675
16676 IEM_MC_BEGIN(0, 2);
16677 IEM_MC_LOCAL(uint16_t, u16Tmp);
16678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16679
16680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16682 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16683
16684 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16685 IEM_MC_FETCH_FSW(u16Tmp);
16686 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
16687 IEM_MC_ADVANCE_RIP();
16688
16689/** @todo Debug / drop a hint to the verifier that things may differ
16690 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
16691 * NT4SP1. (X86_FSW_PE) */
16692 IEM_MC_END();
16693 return VINF_SUCCESS;
16694}
16695
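#if 0 /* Illustrative sketch, compiled out: picking apart the 16-bit value
       * FNSTSW stores, for the kind of divergence the @todo above records --
       * 0x4020 and 0x4000 differ exactly in bit 5, the precision exception
       * flag (X86_FSW_PE). */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint16_t const fsw = 0x4020;
    printf("C3=%u TOP=%u PE=%u\n",
           (unsigned)((fsw >> 14) & 1),  /* Condition code C3.   */
           (unsigned)((fsw >> 11) & 7),  /* Stack top (TOP).     */
           (unsigned)((fsw >>  5) & 1)); /* Precision exception. */
    return 0;
}
#endif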
16696
16697/** Opcode 0xdd 11/0. */
16698FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
16699{
16700 IEMOP_MNEMONIC(ffree_stN, "ffree stN");
16701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16702 /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
16703 unmodified. */
16704
16705 IEM_MC_BEGIN(0, 0);
16706
16707 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16708 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16709
16710 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16711 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
16712 IEM_MC_UPDATE_FPU_OPCODE_IP();
16713
16714 IEM_MC_ADVANCE_RIP();
16715 IEM_MC_END();
16716 return VINF_SUCCESS;
16717}
16718
16719
16720/** Opcode 0xdd 11/2. */
16721FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
16722{
16723 IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
16724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16725
16726 IEM_MC_BEGIN(0, 2);
16727 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
16728 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16729 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16730 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16731
16732 IEM_MC_PREPARE_FPU_USAGE();
16733 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16734 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
16735 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
16736 IEM_MC_ELSE()
16737 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
16738 IEM_MC_ENDIF();
16739
16740 IEM_MC_ADVANCE_RIP();
16741 IEM_MC_END();
16742 return VINF_SUCCESS;
16743}
16744
16745
16746/** Opcode 0xdd 11/4. */
16747FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
16748{
16749 IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
16750 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
16751}
16752
16753
16754/** Opcode 0xdd 11/5. */
16755FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
16756{
16757 IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
16758 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
16759}
16760
16761
16762/** Opcode 0xdd. */
16763FNIEMOP_DEF(iemOp_EscF5)
16764{
16765 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16766 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
16767 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16768 {
16769 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16770 {
16771 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
16772 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
16773 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
16774 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
16775 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0, bRm);
16776 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
16777 case 6: return IEMOP_RAISE_INVALID_OPCODE();
16778 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16779 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16780 }
16781 }
16782 else
16783 {
16784 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16785 {
16786 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
16787 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
16788 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
16789 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
16790 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
16791 case 5: return IEMOP_RAISE_INVALID_OPCODE();
16792 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
16793 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
16794 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16795 }
16796 }
16797}
16798
16799
16800/** Opcode 0xde 11/0. */
16801FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
16802{
16803 IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
16804 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
16805}
16806
16807
16808/** Opcode 0xde 11/1. */
16809FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
16810{
16811 IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
16812 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
16813}
16814
16815
16816/** Opcode 0xde 0xd9. */
16817FNIEMOP_DEF(iemOp_fcompp)
16818{
16819 IEMOP_MNEMONIC(fcompp_st0_st1, "fcompp st0,st1");
16820 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
16821}
16822
16823
16824/** Opcode 0xde 11/4. */
16825FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
16826{
16827 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
16828 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
16829}
16830
16831
16832/** Opcode 0xde 11/5. */
16833FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
16834{
16835 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
16836 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
16837}
16838
16839
16840/** Opcode 0xde 11/6. */
16841FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
16842{
16843 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
16844 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
16845}
16846
16847
16848/** Opcode 0xde 11/7. */
16849FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
16850{
16851 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
16852 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
16853}
16854
16855
16856/**
16857 * Common worker for FPU instructions working on ST0 and an m16i, and storing
16858 * the result in ST0.
16859 *
16860 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16861 */
16862FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
16863{
16864 IEM_MC_BEGIN(3, 3);
16865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16866 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16867 IEM_MC_LOCAL(int16_t, i16Val2);
16868 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16869 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16870 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16871
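    /* Note: the effective address must be calculated first, as it consumes the
       remaining instruction bytes (SIB/displacement) before the decoding-done
       check is performed. */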
16872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16874
16875 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16876 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16877 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16878
16879 IEM_MC_PREPARE_FPU_USAGE();
16880 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16881 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
16882 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
16883 IEM_MC_ELSE()
16884 IEM_MC_FPU_STACK_UNDERFLOW(0);
16885 IEM_MC_ENDIF();
16886 IEM_MC_ADVANCE_RIP();
16887
16888 IEM_MC_END();
16889 return VINF_SUCCESS;
16890}
16891
16892
16893/** Opcode 0xde !11/0. */
16894FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
16895{
16896 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
16897 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
16898}
16899
16900
16901/** Opcode 0xde !11/1. */
16902FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
16903{
16904 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
16905 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
16906}
16907
16908
16909/** Opcode 0xde !11/2. */
16910FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
16911{
16912 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
16913
16914 IEM_MC_BEGIN(3, 3);
16915 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16916 IEM_MC_LOCAL(uint16_t, u16Fsw);
16917 IEM_MC_LOCAL(int16_t, i16Val2);
16918 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16919 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16920 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16921
16922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16924
16925 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16926 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16927 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16928
16929 IEM_MC_PREPARE_FPU_USAGE();
16930 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16931 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16932 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16933 IEM_MC_ELSE()
16934 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16935 IEM_MC_ENDIF();
16936 IEM_MC_ADVANCE_RIP();
16937
16938 IEM_MC_END();
16939 return VINF_SUCCESS;
16940}
16941
16942
16943/** Opcode 0xde !11/3. */
16944FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
16945{
16946 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
16947
16948 IEM_MC_BEGIN(3, 3);
16949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16950 IEM_MC_LOCAL(uint16_t, u16Fsw);
16951 IEM_MC_LOCAL(int16_t, i16Val2);
16952 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16953 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16954 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16955
16956 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16958
16959 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16960 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16961 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16962
16963 IEM_MC_PREPARE_FPU_USAGE();
16964 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16965 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16966 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16967 IEM_MC_ELSE()
16968 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16969 IEM_MC_ENDIF();
16970 IEM_MC_ADVANCE_RIP();
16971
16972 IEM_MC_END();
16973 return VINF_SUCCESS;
16974}
16975
16976
16977/** Opcode 0xde !11/4. */
16978FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
16979{
16980 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
16981 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
16982}
16983
16984
16985/** Opcode 0xde !11/5. */
16986FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
16987{
16988 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
16989 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
16990}
16991
16992
16993/** Opcode 0xde !11/6. */
16994FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
16995{
16996 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
16997 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
16998}
16999
17000
17001/** Opcode 0xde !11/7. */
17002FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
17003{
17004 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
17005 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
17006}
17007
17008
17009/** Opcode 0xde. */
17010FNIEMOP_DEF(iemOp_EscF6)
17011{
17012 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
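    /* Record the x87 FOP value: the low three bits of the escape opcode
       combined with the ModRM byte, as later reported by FSTENV/FSAVE. */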
17013 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
17014 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17015 {
17016 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17017 {
17018 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
17019 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
17020 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
17021 case 3: if (bRm == 0xd9)
17022 return FNIEMOP_CALL(iemOp_fcompp);
17023 return IEMOP_RAISE_INVALID_OPCODE();
17024 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
17025 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
17026 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
17027 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
17028 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17029 }
17030 }
17031 else
17032 {
17033 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17034 {
17035 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
17036 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
17037 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
17038 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
17039 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
17040 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
17041 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
17042 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
17043 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17044 }
17045 }
17046}
17047
17048
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like FFREE + FINCSTP. */
17051FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
17052{
17053 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
17054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17055
17056 IEM_MC_BEGIN(0, 0);
17057
17058 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17059 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17060
17061 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
17062 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
17063 IEM_MC_FPU_STACK_INC_TOP();
17064 IEM_MC_UPDATE_FPU_OPCODE_IP();
17065
17066 IEM_MC_ADVANCE_RIP();
17067 IEM_MC_END();
17068 return VINF_SUCCESS;
17069}
17070
17071
17072/** Opcode 0xdf 0xe0. */
17073FNIEMOP_DEF(iemOp_fnstsw_ax)
17074{
17075 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
17076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17077
17078 IEM_MC_BEGIN(0, 1);
17079 IEM_MC_LOCAL(uint16_t, u16Tmp);
17080 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17081 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
17082 IEM_MC_FETCH_FSW(u16Tmp);
17083 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
17084 IEM_MC_ADVANCE_RIP();
17085 IEM_MC_END();
17086 return VINF_SUCCESS;
17087}
17088
17089
17090/** Opcode 0xdf 11/5. */
17091FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
17092{
17093 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
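    /* Note: currently routed through the same worker as FCOMIP; architecturally
       the unordered variant differs only in not raising #IA for QNaN operands. */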
17094 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
17095}
17096
17097
17098/** Opcode 0xdf 11/6. */
17099FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
17100{
17101 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
17102 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
17103}
17104
17105
17106/** Opcode 0xdf !11/0. */
17107FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
17108{
17109 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
17110
17111 IEM_MC_BEGIN(2, 3);
17112 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17113 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
17114 IEM_MC_LOCAL(int16_t, i16Val);
17115 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
17116 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
17117
17118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17120
17121 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17122 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17123 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17124
17125 IEM_MC_PREPARE_FPU_USAGE();
17126 IEM_MC_IF_FPUREG_IS_EMPTY(7)
17127 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
17128 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17129 IEM_MC_ELSE()
17130 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17131 IEM_MC_ENDIF();
17132 IEM_MC_ADVANCE_RIP();
17133
17134 IEM_MC_END();
17135 return VINF_SUCCESS;
17136}
17137
17138
17139/** Opcode 0xdf !11/1. */
17140FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
17141{
17142 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
17143 IEM_MC_BEGIN(3, 2);
17144 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17145 IEM_MC_LOCAL(uint16_t, u16Fsw);
17146 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17147 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17148 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17149
17150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17152 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17153 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17154
17155 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17156 IEM_MC_PREPARE_FPU_USAGE();
17157 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17158 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17159 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17160 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17161 IEM_MC_ELSE()
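        /* Stack underflow: if the invalid-operation exception is masked
           (FCW.IM), the integer indefinite value is stored instead. */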
17162 IEM_MC_IF_FCW_IM()
17163 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17164 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17165 IEM_MC_ENDIF();
17166 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17167 IEM_MC_ENDIF();
17168 IEM_MC_ADVANCE_RIP();
17169
17170 IEM_MC_END();
17171 return VINF_SUCCESS;
17172}
17173
17174
17175/** Opcode 0xdf !11/2. */
17176FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
17177{
17178 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
17179 IEM_MC_BEGIN(3, 2);
17180 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17181 IEM_MC_LOCAL(uint16_t, u16Fsw);
17182 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17183 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17184 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17185
17186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17188 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17189 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17190
17191 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17192 IEM_MC_PREPARE_FPU_USAGE();
17193 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17194 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17195 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17196 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17197 IEM_MC_ELSE()
17198 IEM_MC_IF_FCW_IM()
17199 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17200 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17201 IEM_MC_ENDIF();
17202 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17203 IEM_MC_ENDIF();
17204 IEM_MC_ADVANCE_RIP();
17205
17206 IEM_MC_END();
17207 return VINF_SUCCESS;
17208}
17209
17210
17211/** Opcode 0xdf !11/3. */
17212FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
17213{
17214 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
17215 IEM_MC_BEGIN(3, 2);
17216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17217 IEM_MC_LOCAL(uint16_t, u16Fsw);
17218 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17219 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17220 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17221
17222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17224 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17225 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17226
17227 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17228 IEM_MC_PREPARE_FPU_USAGE();
17229 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17230 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17231 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17232 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17233 IEM_MC_ELSE()
17234 IEM_MC_IF_FCW_IM()
17235 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17236 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17237 IEM_MC_ENDIF();
17238 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17239 IEM_MC_ENDIF();
17240 IEM_MC_ADVANCE_RIP();
17241
17242 IEM_MC_END();
17243 return VINF_SUCCESS;
17244}
17245
17246
17247/** Opcode 0xdf !11/4. */
17248FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
17249
17250
17251/** Opcode 0xdf !11/5. */
17252FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
17253{
17254 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
17255
17256 IEM_MC_BEGIN(2, 3);
17257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17258 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
17259 IEM_MC_LOCAL(int64_t, i64Val);
17260 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
17261 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
17262
17263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17265
17266 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17267 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17268 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17269
17270 IEM_MC_PREPARE_FPU_USAGE();
17271 IEM_MC_IF_FPUREG_IS_EMPTY(7)
17272 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
17273 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17274 IEM_MC_ELSE()
17275 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17276 IEM_MC_ENDIF();
17277 IEM_MC_ADVANCE_RIP();
17278
17279 IEM_MC_END();
17280 return VINF_SUCCESS;
17281}
17282
17283
17284/** Opcode 0xdf !11/6. */
17285FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
17286
17287
17288/** Opcode 0xdf !11/7. */
17289FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
17290{
17291 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
17292 IEM_MC_BEGIN(3, 2);
17293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17294 IEM_MC_LOCAL(uint16_t, u16Fsw);
17295 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17296 IEM_MC_ARG(int64_t *, pi64Dst, 1);
17297 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17298
17299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17301 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17302 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17303
17304 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17305 IEM_MC_PREPARE_FPU_USAGE();
17306 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17307 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
17308 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
17309 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17310 IEM_MC_ELSE()
17311 IEM_MC_IF_FCW_IM()
17312 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
17313 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
17314 IEM_MC_ENDIF();
17315 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17316 IEM_MC_ENDIF();
17317 IEM_MC_ADVANCE_RIP();
17318
17319 IEM_MC_END();
17320 return VINF_SUCCESS;
17321}
17322
17323
17324/** Opcode 0xdf. */
17325FNIEMOP_DEF(iemOp_EscF7)
17326{
17327 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17328 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17329 {
17330 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17331 {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* FFREE + pop afterwards; per AMD it has always worked like this. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved; behaves like FXCH ST(i) on Intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved; behaves like FSTP ST(i) on Intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved; behaves like FSTP ST(i) on Intel. */
17336 case 4: if (bRm == 0xe0)
17337 return FNIEMOP_CALL(iemOp_fnstsw_ax);
17338 return IEMOP_RAISE_INVALID_OPCODE();
17339 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
17340 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
17341 case 7: return IEMOP_RAISE_INVALID_OPCODE();
17342 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17343 }
17344 }
17345 else
17346 {
17347 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17348 {
17349 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
17350 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
17351 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
17352 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
17353 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
17354 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
17355 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
17356 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
17357 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17358 }
17359 }
17360}
17361
17362
17363/** Opcode 0xe0. */
17364FNIEMOP_DEF(iemOp_loopne_Jb)
17365{
17366 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
17367 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17369 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17370
17371 switch (pVCpu->iem.s.enmEffAddrMode)
17372 {
17373 case IEMMODE_16BIT:
17374 IEM_MC_BEGIN(0,0);
17375 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17376 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17377 IEM_MC_REL_JMP_S8(i8Imm);
17378 } IEM_MC_ELSE() {
17379 IEM_MC_ADVANCE_RIP();
17380 } IEM_MC_ENDIF();
17381 IEM_MC_END();
17382 return VINF_SUCCESS;
17383
17384 case IEMMODE_32BIT:
17385 IEM_MC_BEGIN(0,0);
17386 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17387 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17388 IEM_MC_REL_JMP_S8(i8Imm);
17389 } IEM_MC_ELSE() {
17390 IEM_MC_ADVANCE_RIP();
17391 } IEM_MC_ENDIF();
17392 IEM_MC_END();
17393 return VINF_SUCCESS;
17394
17395 case IEMMODE_64BIT:
17396 IEM_MC_BEGIN(0,0);
17397 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17398 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17399 IEM_MC_REL_JMP_S8(i8Imm);
17400 } IEM_MC_ELSE() {
17401 IEM_MC_ADVANCE_RIP();
17402 } IEM_MC_ENDIF();
17403 IEM_MC_END();
17404 return VINF_SUCCESS;
17405
17406 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17407 }
17408}
17409
17410
17411/** Opcode 0xe1. */
17412FNIEMOP_DEF(iemOp_loope_Jb)
17413{
17414 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
17415 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17417 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17418
17419 switch (pVCpu->iem.s.enmEffAddrMode)
17420 {
17421 case IEMMODE_16BIT:
17422 IEM_MC_BEGIN(0,0);
17423 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17424 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17425 IEM_MC_REL_JMP_S8(i8Imm);
17426 } IEM_MC_ELSE() {
17427 IEM_MC_ADVANCE_RIP();
17428 } IEM_MC_ENDIF();
17429 IEM_MC_END();
17430 return VINF_SUCCESS;
17431
17432 case IEMMODE_32BIT:
17433 IEM_MC_BEGIN(0,0);
17434 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17435 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17436 IEM_MC_REL_JMP_S8(i8Imm);
17437 } IEM_MC_ELSE() {
17438 IEM_MC_ADVANCE_RIP();
17439 } IEM_MC_ENDIF();
17440 IEM_MC_END();
17441 return VINF_SUCCESS;
17442
17443 case IEMMODE_64BIT:
17444 IEM_MC_BEGIN(0,0);
17445 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17446 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17447 IEM_MC_REL_JMP_S8(i8Imm);
17448 } IEM_MC_ELSE() {
17449 IEM_MC_ADVANCE_RIP();
17450 } IEM_MC_ENDIF();
17451 IEM_MC_END();
17452 return VINF_SUCCESS;
17453
17454 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17455 }
17456}
17457
17458
17459/** Opcode 0xe2. */
17460FNIEMOP_DEF(iemOp_loop_Jb)
17461{
17462 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
17463 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17465 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17466
17467 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
17468 * using the 32-bit operand size override. How can that be restarted? See
17469 * weird pseudo code in intel manual. */
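    /* Special case a branch-to-self ('loop $'): instead of re-executing the
       instruction until the counter hits zero, clear the counter and move on. */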
17470 switch (pVCpu->iem.s.enmEffAddrMode)
17471 {
17472 case IEMMODE_16BIT:
17473 IEM_MC_BEGIN(0,0);
17474 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17475 {
17476 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17477 IEM_MC_IF_CX_IS_NZ() {
17478 IEM_MC_REL_JMP_S8(i8Imm);
17479 } IEM_MC_ELSE() {
17480 IEM_MC_ADVANCE_RIP();
17481 } IEM_MC_ENDIF();
17482 }
17483 else
17484 {
17485 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
17486 IEM_MC_ADVANCE_RIP();
17487 }
17488 IEM_MC_END();
17489 return VINF_SUCCESS;
17490
17491 case IEMMODE_32BIT:
17492 IEM_MC_BEGIN(0,0);
17493 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17494 {
17495 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17496 IEM_MC_IF_ECX_IS_NZ() {
17497 IEM_MC_REL_JMP_S8(i8Imm);
17498 } IEM_MC_ELSE() {
17499 IEM_MC_ADVANCE_RIP();
17500 } IEM_MC_ENDIF();
17501 }
17502 else
17503 {
17504 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
17505 IEM_MC_ADVANCE_RIP();
17506 }
17507 IEM_MC_END();
17508 return VINF_SUCCESS;
17509
17510 case IEMMODE_64BIT:
17511 IEM_MC_BEGIN(0,0);
17512 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17513 {
17514 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17515 IEM_MC_IF_RCX_IS_NZ() {
17516 IEM_MC_REL_JMP_S8(i8Imm);
17517 } IEM_MC_ELSE() {
17518 IEM_MC_ADVANCE_RIP();
17519 } IEM_MC_ENDIF();
17520 }
17521 else
17522 {
17523 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
17524 IEM_MC_ADVANCE_RIP();
17525 }
17526 IEM_MC_END();
17527 return VINF_SUCCESS;
17528
17529 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17530 }
17531}
17532
17533
17534/** Opcode 0xe3. */
17535FNIEMOP_DEF(iemOp_jecxz_Jb)
17536{
17537 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
17538 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17540 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
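    /* Note: which counter register (CX/ECX/RCX) is tested depends on the
       address-size prefix, not the operand size, hence the switch below. */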
17541
17542 switch (pVCpu->iem.s.enmEffAddrMode)
17543 {
17544 case IEMMODE_16BIT:
17545 IEM_MC_BEGIN(0,0);
17546 IEM_MC_IF_CX_IS_NZ() {
17547 IEM_MC_ADVANCE_RIP();
17548 } IEM_MC_ELSE() {
17549 IEM_MC_REL_JMP_S8(i8Imm);
17550 } IEM_MC_ENDIF();
17551 IEM_MC_END();
17552 return VINF_SUCCESS;
17553
17554 case IEMMODE_32BIT:
17555 IEM_MC_BEGIN(0,0);
17556 IEM_MC_IF_ECX_IS_NZ() {
17557 IEM_MC_ADVANCE_RIP();
17558 } IEM_MC_ELSE() {
17559 IEM_MC_REL_JMP_S8(i8Imm);
17560 } IEM_MC_ENDIF();
17561 IEM_MC_END();
17562 return VINF_SUCCESS;
17563
17564 case IEMMODE_64BIT:
17565 IEM_MC_BEGIN(0,0);
17566 IEM_MC_IF_RCX_IS_NZ() {
17567 IEM_MC_ADVANCE_RIP();
17568 } IEM_MC_ELSE() {
17569 IEM_MC_REL_JMP_S8(i8Imm);
17570 } IEM_MC_ENDIF();
17571 IEM_MC_END();
17572 return VINF_SUCCESS;
17573
17574 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17575 }
17576}
17577
17578
/** Opcode 0xe4. */
17580FNIEMOP_DEF(iemOp_in_AL_Ib)
17581{
17582 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
17583 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17585 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
17586}
17587
17588
/** Opcode 0xe5. */
17590FNIEMOP_DEF(iemOp_in_eAX_Ib)
17591{
17592 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
17593 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17595 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17596}
17597
17598
/** Opcode 0xe6. */
17600FNIEMOP_DEF(iemOp_out_Ib_AL)
17601{
17602 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
17603 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17605 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
17606}
17607
17608
/** Opcode 0xe7. */
17610FNIEMOP_DEF(iemOp_out_Ib_eAX)
17611{
17612 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
17613 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17615 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17616}
17617
17618
17619/** Opcode 0xe8. */
17620FNIEMOP_DEF(iemOp_call_Jv)
17621{
17622 IEMOP_MNEMONIC(call_Jv, "call Jv");
17623 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17624 switch (pVCpu->iem.s.enmEffOpSize)
17625 {
17626 case IEMMODE_16BIT:
17627 {
17628 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17629 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
17630 }
17631
17632 case IEMMODE_32BIT:
17633 {
17634 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17635 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
17636 }
17637
17638 case IEMMODE_64BIT:
17639 {
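            /* Note: in 64-bit mode the immediate is 32 bits, sign-extended to 64. */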
17640 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17641 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
17642 }
17643
17644 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17645 }
17646}
17647
17648
17649/** Opcode 0xe9. */
17650FNIEMOP_DEF(iemOp_jmp_Jv)
17651{
17652 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
17653 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17654 switch (pVCpu->iem.s.enmEffOpSize)
17655 {
17656 case IEMMODE_16BIT:
17657 {
17658 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
17659 IEM_MC_BEGIN(0, 0);
17660 IEM_MC_REL_JMP_S16(i16Imm);
17661 IEM_MC_END();
17662 return VINF_SUCCESS;
17663 }
17664
17665 case IEMMODE_64BIT:
17666 case IEMMODE_32BIT:
17667 {
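            /* 64-bit mode shares the rel32 form; the signed displacement is
               applied to the full RIP width by the jump helper. */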
17668 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
17669 IEM_MC_BEGIN(0, 0);
17670 IEM_MC_REL_JMP_S32(i32Imm);
17671 IEM_MC_END();
17672 return VINF_SUCCESS;
17673 }
17674
17675 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17676 }
17677}
17678
17679
17680/** Opcode 0xea. */
17681FNIEMOP_DEF(iemOp_jmp_Ap)
17682{
17683 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
17684 IEMOP_HLP_NO_64BIT();
17685
17686 /* Decode the far pointer address and pass it on to the far call C implementation. */
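    /* Note: the offset comes first in the instruction stream, the selector
       last; e.g. in 16-bit code EA 34 12 78 56 decodes as jmp 5678h:1234h. */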
17687 uint32_t offSeg;
17688 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
17689 IEM_OPCODE_GET_NEXT_U32(&offSeg);
17690 else
17691 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
17692 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
17693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17694 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
17695}
17696
17697
17698/** Opcode 0xeb. */
17699FNIEMOP_DEF(iemOp_jmp_Jb)
17700{
17701 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
17702 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17704 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17705
17706 IEM_MC_BEGIN(0, 0);
17707 IEM_MC_REL_JMP_S8(i8Imm);
17708 IEM_MC_END();
17709 return VINF_SUCCESS;
17710}
17711
17712
/** Opcode 0xec. */
17714FNIEMOP_DEF(iemOp_in_AL_DX)
17715{
17716 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
17717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17718 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
17719}
17720
17721
/** Opcode 0xed. */
17723FNIEMOP_DEF(iemOp_eAX_DX)
17724{
17725 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
17726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17727 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17728}
17729
17730
/** Opcode 0xee. */
17732FNIEMOP_DEF(iemOp_out_DX_AL)
17733{
17734 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
17735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17736 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
17737}
17738
17739
/** Opcode 0xef. */
17741FNIEMOP_DEF(iemOp_out_DX_eAX)
17742{
17743 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
17744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17745 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17746}
17747
17748
17749/** Opcode 0xf0. */
17750FNIEMOP_DEF(iemOp_lock)
17751{
17752 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
17753 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
17754
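    /* The prefix handler simply re-dispatches through the one-byte opcode map,
       so any number of prefixes may precede the actual opcode byte. */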
17755 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17756 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17757}
17758
17759
17760/** Opcode 0xf1. */
17761FNIEMOP_DEF(iemOp_int_1)
17762{
17763 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
17764 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
17765 /** @todo testcase! */
17766 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
17767}
17768
17769
17770/** Opcode 0xf2. */
17771FNIEMOP_DEF(iemOp_repne)
17772{
17773 /* This overrides any previous REPE prefix. */
17774 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
17775 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
17776 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
17777
17778 /* For the 4 entry opcode tables, REPNZ overrides any previous
17779 REPZ and operand size prefixes. */
17780 pVCpu->iem.s.idxPrefix = 3;
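    /* (The 4-entry tables are presumably indexed 0 = no prefix, 1 = 0x66,
       2 = 0xf3, 3 = 0xf2; REPNZ thus selects index 3.) */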
17781
17782 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17783 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17784}
17785
17786
17787/** Opcode 0xf3. */
17788FNIEMOP_DEF(iemOp_repe)
17789{
17790 /* This overrides any previous REPNE prefix. */
17791 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
17792 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
17793 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
17794
    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
17797 pVCpu->iem.s.idxPrefix = 2;
17798
17799 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17800 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17801}
17802
17803
17804/** Opcode 0xf4. */
17805FNIEMOP_DEF(iemOp_hlt)
17806{
17807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17808 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
17809}
17810
17811
17812/** Opcode 0xf5. */
17813FNIEMOP_DEF(iemOp_cmc)
17814{
17815 IEMOP_MNEMONIC(cmc, "cmc");
17816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17817 IEM_MC_BEGIN(0, 0);
17818 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
17819 IEM_MC_ADVANCE_RIP();
17820 IEM_MC_END();
17821 return VINF_SUCCESS;
17822}
17823
17824
17825/**
17826 * Common implementation of 'inc/dec/not/neg Eb'.
17827 *
17828 * @param bRm The RM byte.
17829 * @param pImpl The instruction implementation.
17830 */
17831FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17832{
17833 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17834 {
17835 /* register access */
17836 IEM_MC_BEGIN(2, 0);
17837 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17838 IEM_MC_ARG(uint32_t *, pEFlags, 1);
17839 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17840 IEM_MC_REF_EFLAGS(pEFlags);
17841 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17842 IEM_MC_ADVANCE_RIP();
17843 IEM_MC_END();
17844 }
17845 else
17846 {
17847 /* memory access. */
17848 IEM_MC_BEGIN(2, 2);
17849 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17850 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17851 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17852
17853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17854 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17855 IEM_MC_FETCH_EFLAGS(EFlags);
17856 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17857 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17858 else
17859 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
17860
17861 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
17862 IEM_MC_COMMIT_EFLAGS(EFlags);
17863 IEM_MC_ADVANCE_RIP();
17864 IEM_MC_END();
17865 }
17866 return VINF_SUCCESS;
17867}
17868
17869
17870/**
17871 * Common implementation of 'inc/dec/not/neg Ev'.
17872 *
17873 * @param bRm The RM byte.
17874 * @param pImpl The instruction implementation.
17875 */
17876FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17877{
17878 /* Registers are handled by a common worker. */
17879 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17880 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17881
17882 /* Memory we do here. */
17883 switch (pVCpu->iem.s.enmEffOpSize)
17884 {
17885 case IEMMODE_16BIT:
17886 IEM_MC_BEGIN(2, 2);
17887 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17888 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17889 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17890
17891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17892 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17893 IEM_MC_FETCH_EFLAGS(EFlags);
17894 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17895 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
17896 else
17897 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
17898
17899 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
17900 IEM_MC_COMMIT_EFLAGS(EFlags);
17901 IEM_MC_ADVANCE_RIP();
17902 IEM_MC_END();
17903 return VINF_SUCCESS;
17904
17905 case IEMMODE_32BIT:
17906 IEM_MC_BEGIN(2, 2);
17907 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17908 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17909 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17910
17911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17912 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17913 IEM_MC_FETCH_EFLAGS(EFlags);
17914 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17915 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
17916 else
17917 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
17918
17919 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
17920 IEM_MC_COMMIT_EFLAGS(EFlags);
17921 IEM_MC_ADVANCE_RIP();
17922 IEM_MC_END();
17923 return VINF_SUCCESS;
17924
17925 case IEMMODE_64BIT:
17926 IEM_MC_BEGIN(2, 2);
17927 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17928 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17929 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17930
17931 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17932 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17933 IEM_MC_FETCH_EFLAGS(EFlags);
17934 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17935 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
17936 else
17937 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
17938
17939 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
17940 IEM_MC_COMMIT_EFLAGS(EFlags);
17941 IEM_MC_ADVANCE_RIP();
17942 IEM_MC_END();
17943 return VINF_SUCCESS;
17944
17945 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17946 }
17947}
17948
17949
17950/** Opcode 0xf6 /0. */
17951FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
17952{
17953 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
17954 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17955
17956 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17957 {
17958 /* register access */
17959 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17961
17962 IEM_MC_BEGIN(3, 0);
17963 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17964 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
17965 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17966 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17967 IEM_MC_REF_EFLAGS(pEFlags);
17968 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17969 IEM_MC_ADVANCE_RIP();
17970 IEM_MC_END();
17971 }
17972 else
17973 {
17974 /* memory access. */
17975 IEM_MC_BEGIN(3, 2);
17976 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17977 IEM_MC_ARG(uint8_t, u8Src, 1);
17978 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17979 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17980
17981 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
17982 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17983 IEM_MC_ASSIGN(u8Src, u8Imm);
17984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
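        /* TEST only reads its operands, so a read-only mapping is sufficient. */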
17985 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17986 IEM_MC_FETCH_EFLAGS(EFlags);
17987 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17988
17989 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
17990 IEM_MC_COMMIT_EFLAGS(EFlags);
17991 IEM_MC_ADVANCE_RIP();
17992 IEM_MC_END();
17993 }
17994 return VINF_SUCCESS;
17995}
17996
17997
17998/** Opcode 0xf7 /0. */
17999FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
18000{
18001 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
18002 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
18003
18004 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18005 {
18006 /* register access */
18007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18008 switch (pVCpu->iem.s.enmEffOpSize)
18009 {
18010 case IEMMODE_16BIT:
18011 {
18012 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
18013 IEM_MC_BEGIN(3, 0);
18014 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
18015 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
18016 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18017 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18018 IEM_MC_REF_EFLAGS(pEFlags);
18019 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
18020 IEM_MC_ADVANCE_RIP();
18021 IEM_MC_END();
18022 return VINF_SUCCESS;
18023 }
18024
18025 case IEMMODE_32BIT:
18026 {
18027 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
18028 IEM_MC_BEGIN(3, 0);
18029 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18030 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
18031 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18032 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18033 IEM_MC_REF_EFLAGS(pEFlags);
18034 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
18035 /* No clearing the high dword here - test doesn't write back the result. */
18036 IEM_MC_ADVANCE_RIP();
18037 IEM_MC_END();
18038 return VINF_SUCCESS;
18039 }
18040
18041 case IEMMODE_64BIT:
18042 {
18043 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
18044 IEM_MC_BEGIN(3, 0);
18045 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18046 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
18047 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18048 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18049 IEM_MC_REF_EFLAGS(pEFlags);
18050 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
18051 IEM_MC_ADVANCE_RIP();
18052 IEM_MC_END();
18053 return VINF_SUCCESS;
18054 }
18055
18056 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18057 }
18058 }
18059 else
18060 {
18061 /* memory access. */
18062 switch (pVCpu->iem.s.enmEffOpSize)
18063 {
18064 case IEMMODE_16BIT:
18065 {
18066 IEM_MC_BEGIN(3, 2);
18067 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
18068 IEM_MC_ARG(uint16_t, u16Src, 1);
18069 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18070 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18071
18072 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
18073 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
18074 IEM_MC_ASSIGN(u16Src, u16Imm);
18075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18076 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18077 IEM_MC_FETCH_EFLAGS(EFlags);
18078 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
18079
18080 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
18081 IEM_MC_COMMIT_EFLAGS(EFlags);
18082 IEM_MC_ADVANCE_RIP();
18083 IEM_MC_END();
18084 return VINF_SUCCESS;
18085 }
18086
18087 case IEMMODE_32BIT:
18088 {
18089 IEM_MC_BEGIN(3, 2);
18090 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18091 IEM_MC_ARG(uint32_t, u32Src, 1);
18092 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18093 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18094
18095 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18096 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
18097 IEM_MC_ASSIGN(u32Src, u32Imm);
18098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18099 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18100 IEM_MC_FETCH_EFLAGS(EFlags);
18101 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
18102
18103 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
18104 IEM_MC_COMMIT_EFLAGS(EFlags);
18105 IEM_MC_ADVANCE_RIP();
18106 IEM_MC_END();
18107 return VINF_SUCCESS;
18108 }
18109
18110 case IEMMODE_64BIT:
18111 {
18112 IEM_MC_BEGIN(3, 2);
18113 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18114 IEM_MC_ARG(uint64_t, u64Src, 1);
18115 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18116 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18117
18118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18119 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
18120 IEM_MC_ASSIGN(u64Src, u64Imm);
18121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18122 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18123 IEM_MC_FETCH_EFLAGS(EFlags);
18124 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
18125
18126 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
18127 IEM_MC_COMMIT_EFLAGS(EFlags);
18128 IEM_MC_ADVANCE_RIP();
18129 IEM_MC_END();
18130 return VINF_SUCCESS;
18131 }
18132
18133 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18134 }
18135 }
18136}
18137
18138
18139/** Opcode 0xf6 /4, /5, /6 and /7. */
18140FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
18141{
18142 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18143 {
18144 /* register access */
18145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18146 IEM_MC_BEGIN(3, 1);
18147 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18148 IEM_MC_ARG(uint8_t, u8Value, 1);
18149 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18150 IEM_MC_LOCAL(int32_t, rc);
18151
18152 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18153 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18154 IEM_MC_REF_EFLAGS(pEFlags);
18155 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
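        /* The assembly worker returns zero on success and non-zero when a #DE
           (divide error) should be raised, e.g. divide by zero or overflow. */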
18156 IEM_MC_IF_LOCAL_IS_Z(rc) {
18157 IEM_MC_ADVANCE_RIP();
18158 } IEM_MC_ELSE() {
18159 IEM_MC_RAISE_DIVIDE_ERROR();
18160 } IEM_MC_ENDIF();
18161
18162 IEM_MC_END();
18163 }
18164 else
18165 {
18166 /* memory access. */
18167 IEM_MC_BEGIN(3, 2);
18168 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18169 IEM_MC_ARG(uint8_t, u8Value, 1);
18170 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18172 IEM_MC_LOCAL(int32_t, rc);
18173
18174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18176 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18177 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18178 IEM_MC_REF_EFLAGS(pEFlags);
18179 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
18180 IEM_MC_IF_LOCAL_IS_Z(rc) {
18181 IEM_MC_ADVANCE_RIP();
18182 } IEM_MC_ELSE() {
18183 IEM_MC_RAISE_DIVIDE_ERROR();
18184 } IEM_MC_ENDIF();
18185
18186 IEM_MC_END();
18187 }
18188 return VINF_SUCCESS;
18189}
18190
18191
18192/** Opcode 0xf7 /4, /5, /6 and /7. */
18193FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
18194{
18195 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18196
18197 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18198 {
18199 /* register access */
18200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18201 switch (pVCpu->iem.s.enmEffOpSize)
18202 {
18203 case IEMMODE_16BIT:
18204 {
18206 IEM_MC_BEGIN(4, 1);
18207 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18208 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18209 IEM_MC_ARG(uint16_t, u16Value, 2);
18210 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18211 IEM_MC_LOCAL(int32_t, rc);
18212
18213 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18214 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18215 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18216 IEM_MC_REF_EFLAGS(pEFlags);
18217 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18218 IEM_MC_IF_LOCAL_IS_Z(rc) {
18219 IEM_MC_ADVANCE_RIP();
18220 } IEM_MC_ELSE() {
18221 IEM_MC_RAISE_DIVIDE_ERROR();
18222 } IEM_MC_ENDIF();
18223
18224 IEM_MC_END();
18225 return VINF_SUCCESS;
18226 }
18227
18228 case IEMMODE_32BIT:
18229 {
18231 IEM_MC_BEGIN(4, 1);
18232 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18233 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18234 IEM_MC_ARG(uint32_t, u32Value, 2);
18235 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18236 IEM_MC_LOCAL(int32_t, rc);
18237
18238 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18239 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18240 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18241 IEM_MC_REF_EFLAGS(pEFlags);
18242 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18243 IEM_MC_IF_LOCAL_IS_Z(rc) {
18244 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18245 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18246 IEM_MC_ADVANCE_RIP();
18247 } IEM_MC_ELSE() {
18248 IEM_MC_RAISE_DIVIDE_ERROR();
18249 } IEM_MC_ENDIF();
18250
18251 IEM_MC_END();
18252 return VINF_SUCCESS;
18253 }
18254
18255 case IEMMODE_64BIT:
18256 {
18258 IEM_MC_BEGIN(4, 1);
18259 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18260 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18261 IEM_MC_ARG(uint64_t, u64Value, 2);
18262 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18263 IEM_MC_LOCAL(int32_t, rc);
18264
18265 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18266 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18267 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18268 IEM_MC_REF_EFLAGS(pEFlags);
18269 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18270 IEM_MC_IF_LOCAL_IS_Z(rc) {
18271 IEM_MC_ADVANCE_RIP();
18272 } IEM_MC_ELSE() {
18273 IEM_MC_RAISE_DIVIDE_ERROR();
18274 } IEM_MC_ENDIF();
18275
18276 IEM_MC_END();
18277 return VINF_SUCCESS;
18278 }
18279
18280 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18281 }
18282 }
18283 else
18284 {
18285 /* memory access. */
18286 switch (pVCpu->iem.s.enmEffOpSize)
18287 {
18288 case IEMMODE_16BIT:
18289 {
18290 IEM_MC_BEGIN(4, 2);
18291 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18292 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18293 IEM_MC_ARG(uint16_t, u16Value, 2);
18294 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18296 IEM_MC_LOCAL(int32_t, rc);
18297
18298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18300 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18301 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18302 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18303 IEM_MC_REF_EFLAGS(pEFlags);
18304 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18305 IEM_MC_IF_LOCAL_IS_Z(rc) {
18306 IEM_MC_ADVANCE_RIP();
18307 } IEM_MC_ELSE() {
18308 IEM_MC_RAISE_DIVIDE_ERROR();
18309 } IEM_MC_ENDIF();
18310
18311 IEM_MC_END();
18312 return VINF_SUCCESS;
18313 }
18314
18315 case IEMMODE_32BIT:
18316 {
18317 IEM_MC_BEGIN(4, 2);
18318 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18319 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18320 IEM_MC_ARG(uint32_t, u32Value, 2);
18321 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18322 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18323 IEM_MC_LOCAL(int32_t, rc);
18324
18325 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18327 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18328 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18329 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18330 IEM_MC_REF_EFLAGS(pEFlags);
18331 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18332 IEM_MC_IF_LOCAL_IS_Z(rc) {
18333 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18334 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18335 IEM_MC_ADVANCE_RIP();
18336 } IEM_MC_ELSE() {
18337 IEM_MC_RAISE_DIVIDE_ERROR();
18338 } IEM_MC_ENDIF();
18339
18340 IEM_MC_END();
18341 return VINF_SUCCESS;
18342 }
18343
18344 case IEMMODE_64BIT:
18345 {
18346 IEM_MC_BEGIN(4, 2);
18347 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18348 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18349 IEM_MC_ARG(uint64_t, u64Value, 2);
18350 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18351 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18352 IEM_MC_LOCAL(int32_t, rc);
18353
18354 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18356 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18357 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18358 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18359 IEM_MC_REF_EFLAGS(pEFlags);
18360 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18361 IEM_MC_IF_LOCAL_IS_Z(rc) {
18362 IEM_MC_ADVANCE_RIP();
18363 } IEM_MC_ELSE() {
18364 IEM_MC_RAISE_DIVIDE_ERROR();
18365 } IEM_MC_ENDIF();
18366
18367 IEM_MC_END();
18368 return VINF_SUCCESS;
18369 }
18370
18371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18372 }
18373 }
18374}
18375
18376/** Opcode 0xf6. */
18377FNIEMOP_DEF(iemOp_Grp3_Eb)
18378{
18379 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18380 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18381 {
18382 case 0:
18383 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
18384 case 1:
18385/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
18386 return IEMOP_RAISE_INVALID_OPCODE();
18387 case 2:
18388 IEMOP_MNEMONIC(not_Eb, "not Eb");
18389 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
18390 case 3:
18391 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
18392 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
18393 case 4:
18394 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
18395 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18396 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
18397 case 5:
18398 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
18399 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18400 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
18401 case 6:
18402 IEMOP_MNEMONIC(div_Eb, "div Eb");
18403 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18404 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
18405 case 7:
18406 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
18407 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18408 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
18409 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18410 }
18411}
18412
18413
18414/** Opcode 0xf7. */
18415FNIEMOP_DEF(iemOp_Grp3_Ev)
18416{
18417 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18418 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18419 {
18420 case 0:
18421 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
18422 case 1:
18423/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
18424 return IEMOP_RAISE_INVALID_OPCODE();
18425 case 2:
18426 IEMOP_MNEMONIC(not_Ev, "not Ev");
18427 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
18428 case 3:
18429 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
18430 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
18431 case 4:
18432 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
18433 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18434 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
18435 case 5:
18436 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
18437 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18438 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
18439 case 6:
18440 IEMOP_MNEMONIC(div_Ev, "div Ev");
18441 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18442 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
18443 case 7:
18444 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
18445 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18446 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
18447 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18448 }
18449}
18450
18451
18452/** Opcode 0xf8. */
18453FNIEMOP_DEF(iemOp_clc)
18454{
18455 IEMOP_MNEMONIC(clc, "clc");
18456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18457 IEM_MC_BEGIN(0, 0);
18458 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
18459 IEM_MC_ADVANCE_RIP();
18460 IEM_MC_END();
18461 return VINF_SUCCESS;
18462}
18463
18464
18465/** Opcode 0xf9. */
18466FNIEMOP_DEF(iemOp_stc)
18467{
18468 IEMOP_MNEMONIC(stc, "stc");
18469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18470 IEM_MC_BEGIN(0, 0);
18471 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
18472 IEM_MC_ADVANCE_RIP();
18473 IEM_MC_END();
18474 return VINF_SUCCESS;
18475}
18476
18477
18478/** Opcode 0xfa. */
18479FNIEMOP_DEF(iemOp_cli)
18480{
18481 IEMOP_MNEMONIC(cli, "cli");
18482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18483 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
18484}
18485
18486
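/** Opcode 0xfb. */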
18487FNIEMOP_DEF(iemOp_sti)
18488{
18489 IEMOP_MNEMONIC(sti, "sti");
18490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18491 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
18492}
18493
18494
18495/** Opcode 0xfc. */
18496FNIEMOP_DEF(iemOp_cld)
18497{
18498 IEMOP_MNEMONIC(cld, "cld");
18499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18500 IEM_MC_BEGIN(0, 0);
18501 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
18502 IEM_MC_ADVANCE_RIP();
18503 IEM_MC_END();
18504 return VINF_SUCCESS;
18505}
18506
18507
18508/** Opcode 0xfd. */
18509FNIEMOP_DEF(iemOp_std)
18510{
18511 IEMOP_MNEMONIC(std, "std");
18512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18513 IEM_MC_BEGIN(0, 0);
18514 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
18515 IEM_MC_ADVANCE_RIP();
18516 IEM_MC_END();
18517 return VINF_SUCCESS;
18518}
18519
18520
18521/** Opcode 0xfe. */
18522FNIEMOP_DEF(iemOp_Grp4)
18523{
18524 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18525 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18526 {
18527 case 0:
18528 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
18529 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
18530 case 1:
18531 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
18532 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
18533 default:
18534 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
18535 return IEMOP_RAISE_INVALID_OPCODE();
18536 }
18537}
18538
18539
18540/**
18541 * Opcode 0xff /2.
18542 * @param bRm The RM byte.
18543 */
18544FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
18545{
18546 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
18547 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18548
18549 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18550 {
18551 /* The new RIP is taken from a register. */
18552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18553 switch (pVCpu->iem.s.enmEffOpSize)
18554 {
18555 case IEMMODE_16BIT:
18556 IEM_MC_BEGIN(1, 0);
18557 IEM_MC_ARG(uint16_t, u16Target, 0);
18558 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18559 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END();
18561 return VINF_SUCCESS;
18562
18563 case IEMMODE_32BIT:
18564 IEM_MC_BEGIN(1, 0);
18565 IEM_MC_ARG(uint32_t, u32Target, 0);
18566 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18567 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END();
18569 return VINF_SUCCESS;
18570
18571 case IEMMODE_64BIT:
18572 IEM_MC_BEGIN(1, 0);
18573 IEM_MC_ARG(uint64_t, u64Target, 0);
18574 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18575 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END();
18577 return VINF_SUCCESS;
18578
18579 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18580 }
18581 }
18582 else
18583 {
        /* The new RIP is taken from memory. */
18585 switch (pVCpu->iem.s.enmEffOpSize)
18586 {
18587 case IEMMODE_16BIT:
18588 IEM_MC_BEGIN(1, 1);
18589 IEM_MC_ARG(uint16_t, u16Target, 0);
18590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18591 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18593 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18594 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END();
18596 return VINF_SUCCESS;
18597
18598 case IEMMODE_32BIT:
18599 IEM_MC_BEGIN(1, 1);
18600 IEM_MC_ARG(uint32_t, u32Target, 0);
18601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18604 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18605 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
18606 IEM_MC_END()
18607 return VINF_SUCCESS;
18608
18609 case IEMMODE_64BIT:
18610 IEM_MC_BEGIN(1, 1);
18611 IEM_MC_ARG(uint64_t, u64Target, 0);
18612 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18613 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18615 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18616 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
18617 IEM_MC_END()
18618 return VINF_SUCCESS;
18619
18620 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18621 }
18622 }
18623}
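/* Encoding examples for reference: 0xff 0xd0 is 'call rax' in 64-bit code,
   while 0xff 0x10 is the memory form 'call qword [rax]'. */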
18624
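/**
 * Common C-implementation signature shared by iemCImpl_callf and
 * iemCImpl_FarJmp: the new selector, the segment offset, and the effective
 * operand size of the far branch.
 */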
18625typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
18626
18627FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
18628{
18629 /* Registers? How?? */
18630 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
18631 { /* likely */ }
18632 else
18633 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
18634
18635 /* Far pointer loaded from memory. */
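    /* Operand layout (little endian): the offset comes first and the 16-bit
       selector follows it, i.e. m16:16 is off @ +0 / sel @ +2, m16:32 is
       off @ +0 / sel @ +4, and m16:64 is off @ +0 / sel @ +8.  This matches
       the displacements used by the IEM_MC_FETCH_MEM_U16_DISP calls below. */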
18636 switch (pVCpu->iem.s.enmEffOpSize)
18637 {
18638 case IEMMODE_16BIT:
18639 IEM_MC_BEGIN(3, 1);
18640 IEM_MC_ARG(uint16_t, u16Sel, 0);
18641 IEM_MC_ARG(uint16_t, offSeg, 1);
18642 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
18643 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18646 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18647 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
18648 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18649 IEM_MC_END();
18650 return VINF_SUCCESS;
18651
18652 case IEMMODE_64BIT:
18653 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
18654 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
18655 * and call far qword [rsp] encodings. */
18656 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
18657 {
18658 IEM_MC_BEGIN(3, 1);
18659 IEM_MC_ARG(uint16_t, u16Sel, 0);
18660 IEM_MC_ARG(uint64_t, offSeg, 1);
18661                IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
18662 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18665 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18666 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
18667 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18668 IEM_MC_END();
18669 return VINF_SUCCESS;
18670 }
18671 /* AMD falls thru. */
18672 /* fall thru */
18673
18674 case IEMMODE_32BIT:
18675 IEM_MC_BEGIN(3, 1);
18676 IEM_MC_ARG(uint16_t, u16Sel, 0);
18677 IEM_MC_ARG(uint32_t, offSeg, 1);
18678 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
18679 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18682 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18683 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
18684 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18685 IEM_MC_END();
18686 return VINF_SUCCESS;
18687
18688 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18689 }
18690}
18691
18692
18693/**
18694 * Opcode 0xff /3.
18695 * @param bRm The RM byte.
18696 */
18697FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
18698{
18699 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
18700 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
18701}
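/* Encoding example for reference: 0xff 0x1c 0x24 is 'call far [rsp]',
   loading an m16:32 far pointer by default and m16:64 with REX.W on Intel
   (see the helper above for the AMD behavior). */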
18702
18703
18704/**
18705 * Opcode 0xff /4.
18706 * @param bRm The RM byte.
18707 */
18708FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
18709{
18710 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
18711 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18712
18713 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18714 {
18715 /* The new RIP is taken from a register. */
18716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18717 switch (pVCpu->iem.s.enmEffOpSize)
18718 {
18719 case IEMMODE_16BIT:
18720 IEM_MC_BEGIN(0, 1);
18721 IEM_MC_LOCAL(uint16_t, u16Target);
18722 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18723 IEM_MC_SET_RIP_U16(u16Target);
18724 IEM_MC_END()
18725 return VINF_SUCCESS;
18726
18727 case IEMMODE_32BIT:
18728 IEM_MC_BEGIN(0, 1);
18729 IEM_MC_LOCAL(uint32_t, u32Target);
18730 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18731 IEM_MC_SET_RIP_U32(u32Target);
18732 IEM_MC_END()
18733 return VINF_SUCCESS;
18734
18735 case IEMMODE_64BIT:
18736 IEM_MC_BEGIN(0, 1);
18737 IEM_MC_LOCAL(uint64_t, u64Target);
18738 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18739 IEM_MC_SET_RIP_U64(u64Target);
18740 IEM_MC_END()
18741 return VINF_SUCCESS;
18742
18743 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18744 }
18745 }
18746 else
18747 {
18748 /* The new RIP is taken from a memory location. */
18749 switch (pVCpu->iem.s.enmEffOpSize)
18750 {
18751 case IEMMODE_16BIT:
18752 IEM_MC_BEGIN(0, 2);
18753 IEM_MC_LOCAL(uint16_t, u16Target);
18754 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18755 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18757 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18758 IEM_MC_SET_RIP_U16(u16Target);
18759 IEM_MC_END()
18760 return VINF_SUCCESS;
18761
18762 case IEMMODE_32BIT:
18763 IEM_MC_BEGIN(0, 2);
18764 IEM_MC_LOCAL(uint32_t, u32Target);
18765 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18768 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18769 IEM_MC_SET_RIP_U32(u32Target);
18770 IEM_MC_END()
18771 return VINF_SUCCESS;
18772
18773 case IEMMODE_64BIT:
18774 IEM_MC_BEGIN(0, 2);
18775 IEM_MC_LOCAL(uint64_t, u64Target);
18776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18779 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18780 IEM_MC_SET_RIP_U64(u64Target);
18781 IEM_MC_END()
18782 return VINF_SUCCESS;
18783
18784 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18785 }
18786 }
18787}
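/* Encoding examples for reference: 0xff 0xe0 is 'jmp rax', while 0xff 0x25
   followed by a disp32 is the RIP-relative 'jmp [rip+disp32]' form that
   64-bit import/PLT thunks typically use. */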
18788
18789
18790/**
18791 * Opcode 0xff /5.
18792 * @param bRm The RM byte.
18793 */
18794FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
18795{
18796 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
18797 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
18798}
18799
18800
18801/**
18802 * Opcode 0xff /6.
18803 * @param bRm The RM byte.
18804 */
18805FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
18806{
18807 IEMOP_MNEMONIC(push_Ev, "push Ev");
18808
18809 /* Registers are handled by a common worker. */
18810 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18811 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18812
18813 /* Memory we do here. */
18814 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18815 switch (pVCpu->iem.s.enmEffOpSize)
18816 {
18817 case IEMMODE_16BIT:
18818 IEM_MC_BEGIN(0, 2);
18819 IEM_MC_LOCAL(uint16_t, u16Src);
18820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18823 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18824 IEM_MC_PUSH_U16(u16Src);
18825 IEM_MC_ADVANCE_RIP();
18826 IEM_MC_END();
18827 return VINF_SUCCESS;
18828
18829 case IEMMODE_32BIT:
18830 IEM_MC_BEGIN(0, 2);
18831 IEM_MC_LOCAL(uint32_t, u32Src);
18832 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18835 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18836 IEM_MC_PUSH_U32(u32Src);
18837 IEM_MC_ADVANCE_RIP();
18838 IEM_MC_END();
18839 return VINF_SUCCESS;
18840
18841 case IEMMODE_64BIT:
18842 IEM_MC_BEGIN(0, 2);
18843 IEM_MC_LOCAL(uint64_t, u64Src);
18844 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18845 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18847 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18848 IEM_MC_PUSH_U64(u64Src);
18849 IEM_MC_ADVANCE_RIP();
18850 IEM_MC_END();
18851 return VINF_SUCCESS;
18852
18853 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18854 }
18855}
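/* Encoding examples for reference: 0xff 0x30 is 'push qword [rax]' in
   64-bit code, while 0xff 0xf0 is a longer alternative encoding of
   'push rax'. */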
18856
18857
18858/** Opcode 0xff. */
18859FNIEMOP_DEF(iemOp_Grp5)
18860{
18861 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18862 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18863 {
18864 case 0:
18865 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
18866 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
18867 case 1:
18868 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
18869 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
18870 case 2:
18871 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
18872 case 3:
18873 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
18874 case 4:
18875 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
18876 case 5:
18877 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
18878 case 6:
18879 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
18880 case 7:
18881 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
18882 return IEMOP_RAISE_INVALID_OPCODE();
18883 }
18884 AssertFailedReturn(VERR_IEM_IPE_3);
18885}
18886
18887
18888
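/**
 * The dispatch table for single byte opcodes, indexed by the opcode byte.
 * Prefixes, the group opcodes and the 0x0f escape route to further decoding
 * from here.
 */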
18889const PFNIEMOP g_apfnOneByteMap[256] =
18890{
18891 /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
18892 /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
18893 /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
18894 /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
18895 /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
18896 /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
18897 /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
18898 /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
18899 /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
18900 /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
18901 /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
18902 /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
18903 /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
18904 /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
18905 /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
18906 /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
18907 /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
18908 /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
18909 /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
18910 /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
18911 /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
18912 /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
18913 /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
18914 /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
18915 /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
18916 /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
18917 /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
18918 /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
18919 /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
18920 /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
18921 /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
18922 /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
18923 /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
18924 /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
18925 /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
18926 /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
18927 /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
18928 /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
18929 /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
18930 /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
18931 /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
18932 /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
18933 /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
18934 /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
18935 /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
18936 /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
18937 /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
18938 /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
18939 /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
18940 /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
18941 /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
18942 /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
18943 /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
18944 /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
18945 /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
18946 /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
18947 /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
18948 /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
18949 /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
18950 /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
18951 /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
18952 /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
18953 /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
18954 /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
18955};
18956
18957
18958/** @} */
18959
18960#ifdef _MSC_VER
18961# pragma warning(pop)
18962#endif