/* $Id: IEMAllInstructions.cpp.h 65749 2017-02-13 08:14:21Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */

#ifdef _MSC_VER
# pragma warning(push)
# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
#endif


/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

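    /* ModR/M layout reminder: mod is bits 7:6, reg bits 5:3, r/m bits 2:0;
       mod == 11b selects a register operand, anything else an effective
       address (e.g. bRm = 0xc8 gives mod=11b, reg=001b, rm=000b). */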
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
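        /* CMP and TEST have no locked variant and never write their
           destination, so a NULL pfnLockedU8 doubles as a read-only hint. */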
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        if (!pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

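                /* A 32-bit GPR write zeroes bits 63:32 of the register; TEST
                   doesn't write its destination and so skips the clearing. */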
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,  pu8Dst,               0);
    IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
    IEM_MC_ARG(uint32_t *, pEFlags,              2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst,                 0);
            IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags,                 2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst,                 0);
            IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags,                 2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
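            /* Iz stays a 32-bit immediate even with REX.W; it is fetched
               sign-extended to 64 bits below. */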
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst,                 0);
            IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags,                 2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcodes 0xf1, 0xd6. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC(Invalid, "Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid with RM byte. */
FNIEMOPRM_DEF(iemOp_InvalidWithRM)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a Mod R/M sequence. */
FNIEMOP_DEF(iemOp_InvalidNeedRM)
{
    IEMOP_MNEMONIC(InvalidNeedRM, "InvalidNeedRM");
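    /* On Intel CPUs an invalid opcode of this shape still consumes the ModR/M
       byte (and decodes any memory operand) before #UD is raised, so parse it
       here to keep the instruction length in sync. */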
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a Mod R/M sequence and an 8-bit
 *  immediate. */
FNIEMOP_DEF(iemOp_InvalidNeedRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeedRMImm8, "InvalidNeedRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a 3rd escape byte and a Mod R/M
 *  sequence. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRM)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRM, "InvalidNeed3ByteEscRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm;  IEM_OPCODE_GET_NEXT_U8(&bRm);  RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a 3rd escape byte, a Mod R/M sequence,
 *  and an 8-bit immediate. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRMImm8, "InvalidNeed3ByteEscRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm;  IEM_OPCODE_GET_NEXT_U8(&bRm);  RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 1, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}



/** @name ..... opcodes.
 *
 * @{
 */

/** @} */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for VERR (0x0f 0x00 /4) and VERW (0x0f 0x00 /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel,           0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel,           0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg,     0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg,     0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg,     0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg,     0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
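                /* The 286 and 386 return the undefined upper MSW bits as set:
                   bits 15:5 on the 386 (which implements ET, bit 4) and bits
                   15:4 on the 286, hence the masks below. */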
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
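    /* The reg field selects the group member; for /0../3 and /7 the memory
       forms have mod != 3, while the mod == 3 encodings are re-used for the
       VMX/SVM/monitor style instructions selected by the r/m field (e.g.
       0f 01 f8 decodes as mod=11b, reg=/7, rm=0, i.e. swapgs). */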
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,           0);
                IEM_MC_ARG(uint16_t,   u16Sel,            1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

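            /* The 32-bit and 64-bit operand sizes share one path; the C
               implementation writes the destination via a 64-bit reference
               in both cases. */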
            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,           0);
                IEM_MC_ARG(uint16_t,   u16Sel,            1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst,           0);
                IEM_MC_ARG(uint16_t,   u16Sel,            1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst,           0);
                IEM_MC_ARG(uint16_t,   u16Sel,            1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}


/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);


/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
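        /* movups tolerates unaligned memory operands, so a plain (unaligned)
           128-bit store is all that is needed here. */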
1659 IEM_MC_BEGIN(0, 2);
1660 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1661 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1662
1663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1664 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1665 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1666 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1667
1668 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1669 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1670
1671 IEM_MC_ADVANCE_RIP();
1672 IEM_MC_END();
1673 }
1674 return VINF_SUCCESS;
1675}
1676
1677
1678/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
1679FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);
1680
1681/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
1682FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
1683
1684/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
1685FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
1686{
1687 IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
1688 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1689 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1690 {
1691 /*
1692 * Register, register.
1693 */
1694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1695 IEM_MC_BEGIN(0, 1);
1696 IEM_MC_LOCAL(uint64_t, uSrc);
1697
1698 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1699 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1700 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1701 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1702
1703 IEM_MC_ADVANCE_RIP();
1704 IEM_MC_END();
1705 }
1706 else
1707 {
1708 /*
1709 * Memory, register.
1710 */
1711 IEM_MC_BEGIN(0, 2);
1712 IEM_MC_LOCAL(uint64_t, uSrc);
1713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1714
1715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1717 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1718 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1719
1720 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1721 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1722
1723 IEM_MC_ADVANCE_RIP();
1724 IEM_MC_END();
1725 }
1726 return VINF_SUCCESS;
1727}
1728
1729
1730/** Opcode 0x0f 0x12. */
1731FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT
1732
1733/** Opcode 0x66 0x0f 0x12. */
1734FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT
1735
1736/** Opcode 0xf3 0x0f 0x12. */
1737FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT
1738
1739/** Opcode 0xf2 0x0f 0x12. */
1740FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT
1741
1742/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
1743FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
1744
1745/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
1746FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1747{
1748 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
1749 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1750 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1751 {
1752#if 0
1753 /*
1754 * Register, register.
1755 */
1756 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1757 IEM_MC_BEGIN(0, 1);
1758 IEM_MC_LOCAL(uint64_t, uSrc);
1759 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1760 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1761 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1762 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1763 IEM_MC_ADVANCE_RIP();
1764 IEM_MC_END();
1765#else
1766 return IEMOP_RAISE_INVALID_OPCODE();
1767#endif
1768 }
1769 else
1770 {
1771 /*
1772 * Memory, register.
1773 */
1774 IEM_MC_BEGIN(0, 2);
1775 IEM_MC_LOCAL(uint64_t, uSrc);
1776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1777
1778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1779 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1780 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1781 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1782
1783 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1784 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1785
1786 IEM_MC_ADVANCE_RIP();
1787 IEM_MC_END();
1788 }
1789 return VINF_SUCCESS;
1790}
1791
1792/* Opcode 0xf3 0x0f 0x13 - invalid */
1793/* Opcode 0xf2 0x0f 0x13 - invalid */
1794
1795/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
1796FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1797/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
1798FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1799/* Opcode 0xf3 0x0f 0x14 - invalid */
1800/* Opcode 0xf2 0x0f 0x14 - invalid */
1801/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
1802FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1803/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
1804FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1805/* Opcode 0xf3 0x0f 0x15 - invalid */
1806/* Opcode 0xf2 0x0f 0x15 - invalid */
1807/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq / vmovlhps Vdq, Hq, Uq */
1808FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
1809/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
1810FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
1811/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
1812FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
1813/* Opcode 0xf2 0x0f 0x16 - invalid */
1814/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
1815FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
1816/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
1817FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
1818/* Opcode 0xf3 0x0f 0x17 - invalid */
1819/* Opcode 0xf2 0x0f 0x17 - invalid */
1820
1821
1822/** Opcode 0x0f 0x18. */
1823FNIEMOP_DEF(iemOp_prefetch_Grp16)
1824{
1825 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1826 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1827 {
1828 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1829 {
1830 case 4: /* Aliased to /0 for the time being according to AMD. */
1831 case 5: /* Aliased to /0 for the time being according to AMD. */
1832 case 6: /* Aliased to /0 for the time being according to AMD. */
1833 case 7: /* Aliased to /0 for the time being according to AMD. */
1834 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1835 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1836 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1837 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1838 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1839 }
1840
1841 IEM_MC_BEGIN(0, 1);
1842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1845 /* Currently a NOP. */
1846 NOREF(GCPtrEffSrc);
1847 IEM_MC_ADVANCE_RIP();
1848 IEM_MC_END();
1849 return VINF_SUCCESS;
1850 }
1851
1852 return IEMOP_RAISE_INVALID_OPCODE();
1853}
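
/*
 * Illustrative sketch (hypothetical helper, not part of IEM): how the
 * ModR/M reg field above selects the prefetch hint, with /4../7 currently
 * aliased to /0 (prefetchNTA) per the AMD note in the switch.
 */
#if 0
static const char *sketchPrefetchHintName(uint8_t bRm)
{
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 1:  return "prefetcht0";
        case 2:  return "prefetcht1";
        case 3:  return "prefetcht2";
        default: return "prefetchnta";  /* /0 and the /4../7 aliases */
    }
}
#endif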
1854
1855
1856/** Opcode 0x0f 0x19..0x1f. */
1857FNIEMOP_DEF(iemOp_nop_Ev)
1858{
1859 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1860 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1861 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1862 {
1863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1864 IEM_MC_BEGIN(0, 0);
1865 IEM_MC_ADVANCE_RIP();
1866 IEM_MC_END();
1867 }
1868 else
1869 {
1870 IEM_MC_BEGIN(0, 1);
1871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1874 /* Currently a NOP. */
1875 NOREF(GCPtrEffSrc);
1876 IEM_MC_ADVANCE_RIP();
1877 IEM_MC_END();
1878 }
1879 return VINF_SUCCESS;
1880}
1881
1882
1883/** Opcode 0x0f 0x20. */
1884FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1885{
1886 /* mod is ignored, as are operand-size overrides. */
1887 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1888 IEMOP_HLP_MIN_386();
1889 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1890 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1891 else
1892 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1893
1894 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1895 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1896 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1897 {
1898 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1899 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1900 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1901 iCrReg |= 8;
1902 }
1903 switch (iCrReg)
1904 {
1905 case 0: case 2: case 3: case 4: case 8:
1906 break;
1907 default:
1908 return IEMOP_RAISE_INVALID_OPCODE();
1909 }
1910 IEMOP_HLP_DONE_DECODING();
1911
1912 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1913}
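
/*
 * Illustrative sketch (hypothetical helper, not part of IEM): on CPUs with
 * the alternate MOV CR8 feature checked above (fMovCr8In32Bit), the LOCK
 * prefix acts as an extra control register index bit, so F0 0F 20 C0
 * ("lock mov eax, cr0") accesses CR8 instead of CR0 in 32-bit mode.
 */
#if 0
static uint8_t sketchEffectiveCrIndex(uint8_t bRm, bool fLock, bool fAltMovCr8)
{
    uint8_t iCrReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK;
    if (fLock && fAltMovCr8)
        iCrReg |= 8;                    /* LOCK selects the high bank: CR8. */
    return iCrReg;
}
#endif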
1914
1915
1916/** Opcode 0x0f 0x21. */
1917FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1918{
1919 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1920 IEMOP_HLP_MIN_386();
1921 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1923 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1924 return IEMOP_RAISE_INVALID_OPCODE();
1925 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1926 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1927 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1928}
1929
1930
1931/** Opcode 0x0f 0x22. */
1932FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1933{
1934 /* mod is ignored, as are operand-size overrides. */
1935 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1936 IEMOP_HLP_MIN_386();
1937 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1938 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1939 else
1940 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1941
1942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1943 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1944 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1945 {
1946 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1947 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1948 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1949 iCrReg |= 8;
1950 }
1951 switch (iCrReg)
1952 {
1953 case 0: case 2: case 3: case 4: case 8:
1954 break;
1955 default:
1956 return IEMOP_RAISE_INVALID_OPCODE();
1957 }
1958 IEMOP_HLP_DONE_DECODING();
1959
1960 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1961}
1962
1963
1964/** Opcode 0x0f 0x23. */
1965FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1966{
1967 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1968 IEMOP_HLP_MIN_386();
1969 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1971 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1972 return IEMOP_RAISE_INVALID_OPCODE();
1973 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1974 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1975 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1976}
1977
1978
1979/** Opcode 0x0f 0x24. */
1980FNIEMOP_DEF(iemOp_mov_Rd_Td)
1981{
1982 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1983 /** @todo works on 386 and 486. */
1984 /* The RM byte is not considered, see testcase. */
1985 return IEMOP_RAISE_INVALID_OPCODE();
1986}
1987
1988
1989/** Opcode 0x0f 0x26. */
1990FNIEMOP_DEF(iemOp_mov_Td_Rd)
1991{
1992 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1993 /** @todo works on 386 and 486. */
1994 /* The RM byte is not considered, see testcase. */
1995 return IEMOP_RAISE_INVALID_OPCODE();
1996}
1997
1998
1999/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
2000FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
2001{
2002 IEMOP_MNEMONIC(movaps_r_mr, "movaps Vps,Wps");
2003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2004 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2005 {
2006 /*
2007 * Register, register.
2008 */
2009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2010 IEM_MC_BEGIN(0, 0);
2011 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2012 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2013 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2014 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2015 IEM_MC_ADVANCE_RIP();
2016 IEM_MC_END();
2017 }
2018 else
2019 {
2020 /*
2021 * Register, memory.
2022 */
2023 IEM_MC_BEGIN(0, 2);
2024 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2026
2027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2029 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2030 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2031
2032 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2033 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2034
2035 IEM_MC_ADVANCE_RIP();
2036 IEM_MC_END();
2037 }
2038 return VINF_SUCCESS;
2039}
2040
2041/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
2042FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
2043{
2044 IEMOP_MNEMONIC(movapd_r_mr, "movapd Vpd,Wpd");
2045 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2046 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2047 {
2048 /*
2049 * Register, register.
2050 */
2051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2052 IEM_MC_BEGIN(0, 0);
2053 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2054 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2055 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2056 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2057 IEM_MC_ADVANCE_RIP();
2058 IEM_MC_END();
2059 }
2060 else
2061 {
2062 /*
2063 * Register, memory.
2064 */
2065 IEM_MC_BEGIN(0, 2);
2066 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2068
2069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2071 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2072 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2073
2074 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2075 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2076
2077 IEM_MC_ADVANCE_RIP();
2078 IEM_MC_END();
2079 }
2080 return VINF_SUCCESS;
2081}
2082
2083/* Opcode 0xf3 0x0f 0x28 - invalid */
2084/* Opcode 0xf2 0x0f 0x28 - invalid */
2085
2086/** Opcode 0x0f 0x29. */
2087FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
2088{
2089 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2090 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
2091 else
2092 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
2093 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2094 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2095 {
2096 /*
2097 * Register, register.
2098 */
2099 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
2100 IEM_MC_BEGIN(0, 0);
2101 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2102 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2103 else
2104 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2105 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2106 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2107 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2108 IEM_MC_ADVANCE_RIP();
2109 IEM_MC_END();
2110 }
2111 else
2112 {
2113 /*
2114 * Memory, register.
2115 */
2116 IEM_MC_BEGIN(0, 2);
2117 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2119
2120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2121 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2122 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2123 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2124 else
2125 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2126 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2127
2128 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2129 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2130
2131 IEM_MC_ADVANCE_RIP();
2132 IEM_MC_END();
2133 }
2134 return VINF_SUCCESS;
2135}
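
/*
 * Illustrative sketch (hypothetical helper, not part of IEM): the
 * _ALIGN_SSE fetch/store variants used above enforce the 16-byte alignment
 * movaps/movapd require; a misaligned access raises #GP(0).
 */
#if 0
static bool sketchIsSseAligned(RTGCPTR GCPtrMem)
{
    return !(GCPtrMem & 15);    /* 16-byte aligned, otherwise #GP(0). */
}
#endif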
2136
2137
2138/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2139FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2140/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2141FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2142/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
2143FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
2144/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2145FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
2146
2147
2148/** Opcode 0x0f 0x2b. */
2149FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
2150{
2151 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2152 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
2153 else
2154 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
2155 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2156 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2157 {
2158 /*
2159 * memory, register.
2160 */
2161 IEM_MC_BEGIN(0, 2);
2162 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2164
2165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2166 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2167 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2168 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2169 else
2170 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2171 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2172
2173 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2174 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2175
2176 IEM_MC_ADVANCE_RIP();
2177 IEM_MC_END();
2178 }
2179 /* The register, register encoding is invalid. */
2180 else
2181 return IEMOP_RAISE_INVALID_OPCODE();
2182 return VINF_SUCCESS;
2183}
2184
2185
2186/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2187FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2188/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2189FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2190/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
2191FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2192/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
2193FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2194
2195/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2196FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2197/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2198FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2199/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
2200FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2201/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
2202FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2203
2204/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
2205FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
2206/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
2207FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
2208/* Opcode 0xf3 0x0f 0x2e - invalid */
2209/* Opcode 0xf2 0x0f 0x2e - invalid */
2210
2211/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
2212FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
2213/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
2214FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
2215/* Opcode 0xf3 0x0f 0x2f - invalid */
2216/* Opcode 0xf2 0x0f 0x2f - invalid */
2217
2218/** Opcode 0x0f 0x30. */
2219FNIEMOP_DEF(iemOp_wrmsr)
2220{
2221 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2223 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2224}
2225
2226
2227/** Opcode 0x0f 0x31. */
2228FNIEMOP_DEF(iemOp_rdtsc)
2229{
2230 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2232 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2233}
2234
2235
2236/** Opcode 0x0f 0x32. */
2237FNIEMOP_DEF(iemOp_rdmsr)
2238{
2239 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2241 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2242}
2243
2244
2245/** Opcode 0x0f 0x33. */
2246FNIEMOP_STUB(iemOp_rdpmc);
2247/** Opcode 0x0f 0x34. */
2248FNIEMOP_STUB(iemOp_sysenter);
2249/** Opcode 0x0f 0x35. */
2250FNIEMOP_STUB(iemOp_sysexit);
2251/** Opcode 0x0f 0x37. */
2252FNIEMOP_STUB(iemOp_getsec);
2253/** Opcode 0x0f 0x38. */
2254FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2255/** Opcode 0x0f 0x3a. */
2256FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2257
2258
2259/**
2260 * Implements a conditional move.
2261 *
2262 * Wish there were an obvious way to do this where we could share code and
2263 * reduce the bloat.
2264 *
2265 * @param a_Cnd The conditional "microcode" operation.
2266 */
2267#define CMOV_X(a_Cnd) \
2268 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2269 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2270 { \
2271 switch (pVCpu->iem.s.enmEffOpSize) \
2272 { \
2273 case IEMMODE_16BIT: \
2274 IEM_MC_BEGIN(0, 1); \
2275 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2276 a_Cnd { \
2277 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2278 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2279 } IEM_MC_ENDIF(); \
2280 IEM_MC_ADVANCE_RIP(); \
2281 IEM_MC_END(); \
2282 return VINF_SUCCESS; \
2283 \
2284 case IEMMODE_32BIT: \
2285 IEM_MC_BEGIN(0, 1); \
2286 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2287 a_Cnd { \
2288 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2289 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2290 } IEM_MC_ELSE() { \
2291 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2292 } IEM_MC_ENDIF(); \
2293 IEM_MC_ADVANCE_RIP(); \
2294 IEM_MC_END(); \
2295 return VINF_SUCCESS; \
2296 \
2297 case IEMMODE_64BIT: \
2298 IEM_MC_BEGIN(0, 1); \
2299 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2300 a_Cnd { \
2301 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2302 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2303 } IEM_MC_ENDIF(); \
2304 IEM_MC_ADVANCE_RIP(); \
2305 IEM_MC_END(); \
2306 return VINF_SUCCESS; \
2307 \
2308 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2309 } \
2310 } \
2311 else \
2312 { \
2313 switch (pVCpu->iem.s.enmEffOpSize) \
2314 { \
2315 case IEMMODE_16BIT: \
2316 IEM_MC_BEGIN(0, 2); \
2317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2318 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2320 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2321 a_Cnd { \
2322 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2323 } IEM_MC_ENDIF(); \
2324 IEM_MC_ADVANCE_RIP(); \
2325 IEM_MC_END(); \
2326 return VINF_SUCCESS; \
2327 \
2328 case IEMMODE_32BIT: \
2329 IEM_MC_BEGIN(0, 2); \
2330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2331 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2333 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2334 a_Cnd { \
2335 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2336 } IEM_MC_ELSE() { \
2337 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2338 } IEM_MC_ENDIF(); \
2339 IEM_MC_ADVANCE_RIP(); \
2340 IEM_MC_END(); \
2341 return VINF_SUCCESS; \
2342 \
2343 case IEMMODE_64BIT: \
2344 IEM_MC_BEGIN(0, 2); \
2345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2346 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2348 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2349 a_Cnd { \
2350 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2351 } IEM_MC_ENDIF(); \
2352 IEM_MC_ADVANCE_RIP(); \
2353 IEM_MC_END(); \
2354 return VINF_SUCCESS; \
2355 \
2356 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2357 } \
2358 } do {} while (0)
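
/*
 * Illustrative sketch (hypothetical helper, not part of IEM): what CMOV_X
 * expands to for the 32-bit register case. Note the IEM_MC_ELSE branch
 * above: with a 32-bit operand size the high half of the destination
 * register is cleared even when the condition is false, so the destination
 * is always written, matching real 64-bit capable hardware.
 */
#if 0
static uint64_t sketchCmov32(uint32_t uDst, uint32_t uSrc, bool fCondition)
{
    /* Viewed as the full 64-bit register: the high half is always zero. */
    return fCondition ? (uint64_t)uSrc : (uint64_t)uDst;
}
#endif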
2359
2360
2361
2362/** Opcode 0x0f 0x40. */
2363FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2364{
2365 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2366 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2367}
2368
2369
2370/** Opcode 0x0f 0x41. */
2371FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2372{
2373 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2374 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2375}
2376
2377
2378/** Opcode 0x0f 0x42. */
2379FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2380{
2381 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2382 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2383}
2384
2385
2386/** Opcode 0x0f 0x43. */
2387FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2388{
2389 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2390 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2391}
2392
2393
2394/** Opcode 0x0f 0x44. */
2395FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2396{
2397 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2398 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2399}
2400
2401
2402/** Opcode 0x0f 0x45. */
2403FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2404{
2405 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2406 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2407}
2408
2409
2410/** Opcode 0x0f 0x46. */
2411FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2412{
2413 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2414 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2415}
2416
2417
2418/** Opcode 0x0f 0x47. */
2419FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2420{
2421 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2422 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2423}
2424
2425
2426/** Opcode 0x0f 0x48. */
2427FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2428{
2429 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2430 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2431}
2432
2433
2434/** Opcode 0x0f 0x49. */
2435FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2436{
2437 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2438 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2439}
2440
2441
2442/** Opcode 0x0f 0x4a. */
2443FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2444{
2445 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2446 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2447}
2448
2449
2450/** Opcode 0x0f 0x4b. */
2451FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2452{
2453 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2454 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2455}
2456
2457
2458/** Opcode 0x0f 0x4c. */
2459FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2460{
2461 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2462 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2463}
2464
2465
2466/** Opcode 0x0f 0x4d. */
2467FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2468{
2469 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2470 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2471}
2472
2473
2474/** Opcode 0x0f 0x4e. */
2475FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2476{
2477 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2478 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2479}
2480
2481
2482/** Opcode 0x0f 0x4f. */
2483FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2484{
2485 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2486 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2487}
2488
2489#undef CMOV_X
2490
2491/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
2492FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
2493/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
2494FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
2495/* Opcode 0xf3 0x0f 0x50 - invalid */
2496/* Opcode 0xf2 0x0f 0x50 - invalid */
2497
2498/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
2499FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2500/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
2501FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2502/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
2503FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2504/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2505FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2506
2507/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
2508FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2509/* Opcode 0x66 0x0f 0x52 - invalid */
2510/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
2511FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2512/* Opcode 0xf2 0x0f 0x52 - invalid */
2513
2514/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
2515FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2516/* Opcode 0x66 0x0f 0x53 - invalid */
2517/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
2518FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2519/* Opcode 0xf2 0x0f 0x53 - invalid */
2520
2521/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
2522FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
2523/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
2524FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
2525/* Opcode 0xf3 0x0f 0x54 - invalid */
2526/* Opcode 0xf2 0x0f 0x54 - invalid */
2527
2528/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
2529FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
2530/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
2531FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
2532/* Opcode 0xf3 0x0f 0x55 - invalid */
2533/* Opcode 0xf2 0x0f 0x55 - invalid */
2534
2535/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
2536FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
2537/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
2538FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
2539/* Opcode 0xf3 0x0f 0x56 - invalid */
2540/* Opcode 0xf2 0x0f 0x56 - invalid */
2541
2542/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
2543FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
2544/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
2545FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
2546/* Opcode 0xf3 0x0f 0x57 - invalid */
2547/* Opcode 0xf2 0x0f 0x57 - invalid */
2548
2549/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2550FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2551/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2552FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2553/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2554FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2555/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2556FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2557
2558/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2559FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2560/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2561FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2562/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2563FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2564/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2565FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2566
2567/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2568FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2569/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2570FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2571/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2572FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2573/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2574FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2575
2576/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2577FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2578/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2579FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2580/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2581FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2582/* Opcode 0xf2 0x0f 0x5b - invalid */
2583
2584/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2585FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2586/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2587FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2588/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2589FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2590/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2591FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2592
2593/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2594FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2595/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2596FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2597/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2598FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2599/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2600FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2601
2602/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2603FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2604/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2605FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2606/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2607FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2608/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2609FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2610
2611/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2612FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2613/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2614FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2615/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2616FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2617/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2618FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2619
2620/**
2621 * Common worker for SSE2 instructions on the forms:
2622 * pxxxx xmm1, xmm2/mem128
2623 *
2624 * The 2nd operand is the first (low) half of a register, which in the memory
2625 * case means a 128-bit aligned 64-bit memory access where only the low
2626 * quadword is used.
2627 *
2628 * Exceptions type 4.
2629 */
2630FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2631{
2632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2633 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2634 {
2635 /*
2636 * Register, register.
2637 */
2638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2639 IEM_MC_BEGIN(2, 0);
2640 IEM_MC_ARG(uint128_t *, pDst, 0);
2641 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2642 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2643 IEM_MC_PREPARE_SSE_USAGE();
2644 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2645 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2646 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2647 IEM_MC_ADVANCE_RIP();
2648 IEM_MC_END();
2649 }
2650 else
2651 {
2652 /*
2653 * Register, memory.
2654 */
2655 IEM_MC_BEGIN(2, 2);
2656 IEM_MC_ARG(uint128_t *, pDst, 0);
2657 IEM_MC_LOCAL(uint64_t, uSrc);
2658 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2660
2661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2663 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2664 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2665
2666 IEM_MC_PREPARE_SSE_USAGE();
2667 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2668 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2669
2670 IEM_MC_ADVANCE_RIP();
2671 IEM_MC_END();
2672 }
2673 return VINF_SUCCESS;
2674}
2675
2676
2677/**
2678 * Common worker for MMX instructions on the forms:
2679 * pxxxx mm1, mm2/mem32
2680 *
2681 * The 2nd operand is the first (low) half of a register, which in the memory
2682 * case means a 32-bit memory access. Instructions without an MMX form
2683 * (pfnU64 is NULL) raise #UD.
2684 *
2685 * Exceptions type 4.
2686 */
2687FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2688{
2689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2690 if (!pImpl->pfnU64)
2691 return IEMOP_RAISE_INVALID_OPCODE();
2692 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2693 {
2694 /*
2695 * Register, register.
2696 */
2697 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2698 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2700 IEM_MC_BEGIN(2, 0);
2701 IEM_MC_ARG(uint64_t *, pDst, 0);
2702 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2703 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2704 IEM_MC_PREPARE_FPU_USAGE();
2705 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2706 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2707 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2708 IEM_MC_ADVANCE_RIP();
2709 IEM_MC_END();
2710 }
2711 else
2712 {
2713 /*
2714 * Register, memory.
2715 */
2716 IEM_MC_BEGIN(2, 2);
2717 IEM_MC_ARG(uint64_t *, pDst, 0);
2718 IEM_MC_LOCAL(uint32_t, uSrc);
2719 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2720 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2721
2722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2724 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2725 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2726
2727 IEM_MC_PREPARE_FPU_USAGE();
2728 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2729 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2730
2731 IEM_MC_ADVANCE_RIP();
2732 IEM_MC_END();
2733 }
2734 return VINF_SUCCESS;
2735}
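
/*
 * Illustrative sketch (hypothetical helper, not part of IEM): data flow of
 * the low-half unpacks dispatched to the workers above, at MMX width.
 * punpcklbw interleaves the low four bytes of destination and source:
 * d0 s0 d1 s1 d2 s2 d3 s3.
 */
#if 0
static uint64_t sketchPunpcklbw(uint32_t uLoDst, uint32_t uLoSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= (uint64_t)((uLoDst >> (i * 8)) & 0xff) << (i * 16);
        uResult |= (uint64_t)((uLoSrc >> (i * 8)) & 0xff) << (i * 16 + 8);
    }
    return uResult;
}
#endif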
2736
2737
2738/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2739FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2740{
2741 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2742 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2743}
2744
2745/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
2746FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2747{
2748 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2749 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2750}
2751
2752/* Opcode 0xf3 0x0f 0x60 - invalid */
2753
2754
2755/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2756FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2757{
2758 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires MMX CPUID. */
2759 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2760}
2761
2762/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2763FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2764{
2765 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2766 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2767}
2768
2769/* Opcode 0xf3 0x0f 0x61 - invalid */
2770
2771
2772/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2773FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2774{
2775 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2776 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2777}
2778
2779/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2780FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2781{
2782 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2783 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2784}
2785
2786/* Opcode 0xf3 0x0f 0x62 - invalid */
2787
2788
2789
2790/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2791FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2792/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2793FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2794/* Opcode 0xf3 0x0f 0x63 - invalid */
2795
2796/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2797FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2798/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2799FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2800/* Opcode 0xf3 0x0f 0x64 - invalid */
2801
2802/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2803FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2804/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2805FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2806/* Opcode 0xf3 0x0f 0x65 - invalid */
2807
2808/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2809FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2810/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2811FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2812/* Opcode 0xf3 0x0f 0x66 - invalid */
2813
2814/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2815FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2816/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, Wx */
2817FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2818/* Opcode 0xf3 0x0f 0x67 - invalid */
2819
2820
2821/**
2822 * Common worker for MMX instructions on the form:
2823 * pxxxx mm1, mm2/mem64
2824 *
2825 * The 2nd operand is the second half of a register, which in the memory case
2826 * means a 64-bit memory access.
2828 *
2829 * Exceptions type 4.
2830 */
2831FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2832{
2833 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2834 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2835 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2836 {
2837 /*
2838 * Register, register.
2839 */
2840 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2841 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2843 IEM_MC_BEGIN(2, 0);
2844 IEM_MC_ARG(uint64_t *, pDst, 0);
2845 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2846 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2847 IEM_MC_PREPARE_FPU_USAGE();
2848 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2849 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2850 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2851 IEM_MC_ADVANCE_RIP();
2852 IEM_MC_END();
2853 }
2854 else
2855 {
2856 /*
2857 * Register, memory.
2858 */
2859 IEM_MC_BEGIN(2, 2);
2860 IEM_MC_ARG(uint64_t *, pDst, 0);
2861 IEM_MC_LOCAL(uint64_t, uSrc);
2862 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2864
2865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2867 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2868 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2869
2870 IEM_MC_PREPARE_FPU_USAGE();
2871 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2872 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2873
2874 IEM_MC_ADVANCE_RIP();
2875 IEM_MC_END();
2876 }
2877 return VINF_SUCCESS;
2878}
2879
2880
2881/**
2882 * Common worker for SSE2 instructions on the form:
2883 * pxxxx xmm1, xmm2/mem128
2884 *
2885 * The 2nd operand is the second half of a register, which in the memory case
2886 * means a 128-bit aligned access where the implementation may read the full
2887 * 128 bits or only the upper 64 bits.
2888 *
2889 * Exceptions type 4.
2890 */
2891FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2892{
2893 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2894 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2895 {
2896 /*
2897 * Register, register.
2898 */
2899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2900 IEM_MC_BEGIN(2, 0);
2901 IEM_MC_ARG(uint128_t *, pDst, 0);
2902 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2903 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2904 IEM_MC_PREPARE_SSE_USAGE();
2905 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2906 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2907 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2908 IEM_MC_ADVANCE_RIP();
2909 IEM_MC_END();
2910 }
2911 else
2912 {
2913 /*
2914 * Register, memory.
2915 */
2916 IEM_MC_BEGIN(2, 2);
2917 IEM_MC_ARG(uint128_t *, pDst, 0);
2918 IEM_MC_LOCAL(uint128_t, uSrc);
2919 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2920 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2921
2922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2924 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2925 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2926
2927 IEM_MC_PREPARE_SSE_USAGE();
2928 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2929 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2930
2931 IEM_MC_ADVANCE_RIP();
2932 IEM_MC_END();
2933 }
2934 return VINF_SUCCESS;
2935}
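
/*
 * Illustrative sketch (hypothetical helper, not part of IEM): companion to
 * the low-half sketch further up, at MMX width. punpckhbw interleaves the
 * upper four bytes instead: d4 s4 d5 s5 d6 s6 d7 s7.
 */
#if 0
static uint64_t sketchPunpckhbw(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (32 + i * 8)) & UINT64_C(0xff)) << (i * 16);
        uResult |= ((uSrc >> (32 + i * 8)) & UINT64_C(0xff)) << (i * 16 + 8);
    }
    return uResult;
}
#endif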
2936
2937
2938/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2939FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2940{
2941 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2942 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2943}
2944
2945/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2946FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2947{
2948 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2949 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2950}
2951/* Opcode 0xf3 0x0f 0x68 - invalid */
2952
2953
2954/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2955FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2956{
2957 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2958 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2959}
2960
2961/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2962FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2963{
2964 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2965 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2967}
2968/* Opcode 0xf3 0x0f 0x69 - invalid */
2969
2970
2971/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2972FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2973{
2974 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2975 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2976}
2977
2978/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, Wx */
2979FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2980{
2981 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, Wx");
2982 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2983}
2984/* Opcode 0xf3 0x0f 0x6a - invalid */
2985
2986
2987/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2988FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2989/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2990FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2991/* Opcode 0xf3 0x0f 0x6b - invalid */
2992
2993
2994/* Opcode 0x0f 0x6c - invalid */
2995
2996/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2997FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2998{
2999 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
3000 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3001}
3002
3003/* Opcode 0xf3 0x0f 0x6c - invalid */
3004/* Opcode 0xf2 0x0f 0x6c - invalid */
3005
3006
3007/* Opcode 0x0f 0x6d - invalid */
3008
3009/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, Wx */
3010FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3011{
3012 IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, Wx");
3013 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3014}
3015
3016/* Opcode 0xf3 0x0f 0x6d - invalid */
3017
3018
3019
3020/** Opcode 0x0f 0x6e. */
3021FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
3022{
3023 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3024 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3025 {
3026 case IEM_OP_PRF_SIZE_OP: /* SSE */
3027 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3028 IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq");
3029 else
3030 IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed");
3031 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3032 {
3033 /* XMM, greg*/
3034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3035 IEM_MC_BEGIN(0, 1);
3036 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3037 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3038 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3039 {
3040 IEM_MC_LOCAL(uint64_t, u64Tmp);
3041 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3042 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3043 }
3044 else
3045 {
3046 IEM_MC_LOCAL(uint32_t, u32Tmp);
3047 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3048 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3049 }
3050 IEM_MC_ADVANCE_RIP();
3051 IEM_MC_END();
3052 }
3053 else
3054 {
3055 /* XMM, [mem] */
3056 IEM_MC_BEGIN(0, 2);
3057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3058 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
3059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3061 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3062 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3063 {
3064 IEM_MC_LOCAL(uint64_t, u64Tmp);
3065 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3066 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3067 }
3068 else
3069 {
3070 IEM_MC_LOCAL(uint32_t, u32Tmp);
3071 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3072 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3073 }
3074 IEM_MC_ADVANCE_RIP();
3075 IEM_MC_END();
3076 }
3077 return VINF_SUCCESS;
3078
3079 case 0: /* MMX */
3080 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3081 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
3082 else
3083 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
3084 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3085 {
3086 /* MMX, greg */
3087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3088 IEM_MC_BEGIN(0, 1);
3089 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3090 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3091 IEM_MC_LOCAL(uint64_t, u64Tmp);
3092 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3093 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3094 else
3095 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3096 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3097 IEM_MC_ADVANCE_RIP();
3098 IEM_MC_END();
3099 }
3100 else
3101 {
3102 /* MMX, [mem] */
3103 IEM_MC_BEGIN(0, 2);
3104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3105 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3108 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3109 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3110 {
3111 IEM_MC_LOCAL(uint64_t, u64Tmp);
3112 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3113 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3114 }
3115 else
3116 {
3117 IEM_MC_LOCAL(uint32_t, u32Tmp);
3118 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3119 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3120 }
3121 IEM_MC_ADVANCE_RIP();
3122 IEM_MC_END();
3123 }
3124 return VINF_SUCCESS;
3125
3126 default:
3127 return IEMOP_RAISE_INVALID_OPCODE();
3128 }
3129}
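
/*
 * Illustrative sketch (hypothetical helper, not part of IEM): store
 * semantics of the XMM destination cases above. movd/movq into an XMM
 * register zero-extends through the full 128 bits, which is what the
 * _ZX_U128 store microcode ops express.
 */
#if 0
static void sketchMovdToXmm(uint32_t uSrc, uint64_t au64Xmm[2])
{
    au64Xmm[0] = uSrc;  /* zero-extended into the low qword */
    au64Xmm[1] = 0;     /* high qword is cleared as well */
}
#endif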
3130
3131
3132/** Opcode 0x0f 0x6f. */
3133FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
3134{
3135 bool fAligned = false;
3136 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3137 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3138 {
3139 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
3140 fAligned = true;
3141 /* fall thru */
3142 case IEM_OP_PRF_REPZ: /* SSE unaligned */
3143 if (fAligned)
3144 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3145 else
3146 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3147 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3148 {
3149 /*
3150 * Register, register.
3151 */
3152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3153 IEM_MC_BEGIN(0, 0);
3154 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3155 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3156 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3157 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3158 IEM_MC_ADVANCE_RIP();
3159 IEM_MC_END();
3160 }
3161 else
3162 {
3163 /*
3164 * Register, memory.
3165 */
3166 IEM_MC_BEGIN(0, 2);
3167 IEM_MC_LOCAL(uint128_t, u128Tmp);
3168 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3169
3170 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3172 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3173 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3174 if (fAligned)
3175 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3176 else
3177 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3178 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3179
3180 IEM_MC_ADVANCE_RIP();
3181 IEM_MC_END();
3182 }
3183 return VINF_SUCCESS;
3184
3185 case 0: /* MMX */
3186 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3187 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3188 {
3189 /*
3190 * Register, register.
3191 */
3192 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3193 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3195 IEM_MC_BEGIN(0, 1);
3196 IEM_MC_LOCAL(uint64_t, u64Tmp);
3197 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3198 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3199 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3200 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3201 IEM_MC_ADVANCE_RIP();
3202 IEM_MC_END();
3203 }
3204 else
3205 {
3206 /*
3207 * Register, memory.
3208 */
3209 IEM_MC_BEGIN(0, 2);
3210 IEM_MC_LOCAL(uint64_t, u64Tmp);
3211 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3212
3213 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3215 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3216 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3217 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3218 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3219
3220 IEM_MC_ADVANCE_RIP();
3221 IEM_MC_END();
3222 }
3223 return VINF_SUCCESS;
3224
3225 default:
3226 return IEMOP_RAISE_INVALID_OPCODE();
3227 }
3228}
3229
3230
3231/** Opcode 0x0f 0x70. The immediate here is evil! */
3232FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
3233{
3234 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3235 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3236 {
3237 case IEM_OP_PRF_SIZE_OP: /* SSE */
3238 case IEM_OP_PRF_REPNZ: /* SSE */
3239 case IEM_OP_PRF_REPZ: /* SSE */
3240 {
3241 PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
3242 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3243 {
3244 case IEM_OP_PRF_SIZE_OP:
3245 IEMOP_MNEMONIC(pshufd_Vdq_Wdq, "pshufd Vdq,Wdq,Ib");
3246 pfnAImpl = iemAImpl_pshufd;
3247 break;
3248 case IEM_OP_PRF_REPNZ:
3249 IEMOP_MNEMONIC(pshuflw_Vdq_Wdq, "pshuflw Vdq,Wdq,Ib");
3250 pfnAImpl = iemAImpl_pshuflw;
3251 break;
3252 case IEM_OP_PRF_REPZ:
3253 IEMOP_MNEMONIC(pshufhw_Vdq_Wdq, "pshufhw Vdq,Wdq,Ib");
3254 pfnAImpl = iemAImpl_pshufhw;
3255 break;
3256 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3257 }
3258 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3259 {
3260 /*
3261 * Register, register.
3262 */
3263 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3265
3266 IEM_MC_BEGIN(3, 0);
3267 IEM_MC_ARG(uint128_t *, pDst, 0);
3268 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3269 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3270 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3271 IEM_MC_PREPARE_SSE_USAGE();
3272 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3273 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3274 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
3275 IEM_MC_ADVANCE_RIP();
3276 IEM_MC_END();
3277 }
3278 else
3279 {
3280 /*
3281 * Register, memory.
3282 */
3283 IEM_MC_BEGIN(3, 2);
3284 IEM_MC_ARG(uint128_t *, pDst, 0);
3285 IEM_MC_LOCAL(uint128_t, uSrc);
3286 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3288
3289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3290 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3291 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3293 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3294
3295 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3296 IEM_MC_PREPARE_SSE_USAGE();
3297 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3298 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
3299
3300 IEM_MC_ADVANCE_RIP();
3301 IEM_MC_END();
3302 }
3303 return VINF_SUCCESS;
3304 }
3305
3306 case 0: /* MMX Extension */
3307 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3308 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3309 {
3310 /*
3311 * Register, register.
3312 */
3313 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3315
3316 IEM_MC_BEGIN(3, 0);
3317 IEM_MC_ARG(uint64_t *, pDst, 0);
3318 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3319 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3320 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3321 IEM_MC_PREPARE_FPU_USAGE();
3322 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3323 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3324 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3325 IEM_MC_ADVANCE_RIP();
3326 IEM_MC_END();
3327 }
3328 else
3329 {
3330 /*
3331 * Register, memory.
3332 */
3333 IEM_MC_BEGIN(3, 2);
3334 IEM_MC_ARG(uint64_t *, pDst, 0);
3335 IEM_MC_LOCAL(uint64_t, uSrc);
3336 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3337 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3338
3339 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3340 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3341 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3343 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3344
3345 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3346 IEM_MC_PREPARE_FPU_USAGE();
3347 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3348 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3349
3350 IEM_MC_ADVANCE_RIP();
3351 IEM_MC_END();
3352 }
3353 return VINF_SUCCESS;
3354
3355 default:
3356 return IEMOP_RAISE_INVALID_OPCODE();
3357 }
3358}
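
/*
 * Illustrative sketch (hypothetical helper, not part of IEM): semantics of
 * the MMX pshufw path above. Each 2-bit field of the immediate selects
 * which source word goes into the corresponding result word; pshufd does
 * the same at dword granularity on XMM registers.
 */
#if 0
static uint64_t sketchPshufw(uint64_t uSrc, uint8_t bEvil)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        unsigned const iSel = (bEvil >> (i * 2)) & 3;
        uResult |= ((uSrc >> (iSel * 16)) & UINT64_C(0xffff)) << (i * 16);
    }
    return uResult;
}
#endif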
3359
3360
3361/** Opcode 0x0f 0x71 11/2. */
3362FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3363
3364/** Opcode 0x66 0x0f 0x71 11/2. */
3365FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
3366
3367/** Opcode 0x0f 0x71 11/4. */
3368FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3369
3370/** Opcode 0x66 0x0f 0x71 11/4. */
3371FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
3372
3373/** Opcode 0x0f 0x71 11/6. */
3374FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3375
3376/** Opcode 0x66 0x0f 0x71 11/6. */
3377FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
3378
3379
3380/** Opcode 0x0f 0x71. */
3381FNIEMOP_DEF(iemOp_Grp12)
3382{
3383 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3384 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3385 return IEMOP_RAISE_INVALID_OPCODE();
3386 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3387 {
3388 case 0: case 1: case 3: case 5: case 7:
3389 return IEMOP_RAISE_INVALID_OPCODE();
3390 case 2:
3391 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3392 {
3393 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
3394 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3395 default: return IEMOP_RAISE_INVALID_OPCODE();
3396 }
3397 case 4:
3398 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3399 {
3400 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3401 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3402 default: return IEMOP_RAISE_INVALID_OPCODE();
3403 }
3404 case 6:
3405 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3406 {
3407 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3408 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3409 default: return IEMOP_RAISE_INVALID_OPCODE();
3410 }
3411 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3412 }
3413}
3414
3415
3416/** Opcode 0x0f 0x72 11/2. */
3417FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3418
3419/** Opcode 0x66 0x0f 0x72 11/2. */
3420FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
3421
3422/** Opcode 0x0f 0x72 11/4. */
3423FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3424
3425/** Opcode 0x66 0x0f 0x72 11/4. */
3426FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
3427
3428/** Opcode 0x0f 0x72 11/6. */
3429FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3430
3431/** Opcode 0x66 0x0f 0x72 11/6. */
3432FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3433
3434
3435/** Opcode 0x0f 0x72. */
3436FNIEMOP_DEF(iemOp_Grp13)
3437{
3438 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3439 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3440 return IEMOP_RAISE_INVALID_OPCODE();
3441 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3442 {
3443 case 0: case 1: case 3: case 5: case 7:
3444 return IEMOP_RAISE_INVALID_OPCODE();
3445 case 2:
3446 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3447 {
3448 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3449 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3450 default: return IEMOP_RAISE_INVALID_OPCODE();
3451 }
3452 case 4:
3453 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3454 {
3455 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3456 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3457 default: return IEMOP_RAISE_INVALID_OPCODE();
3458 }
3459 case 6:
3460 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3461 {
3462 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3463 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3464 default: return IEMOP_RAISE_INVALID_OPCODE();
3465 }
3466 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3467 }
3468}
3469
3470
3471/** Opcode 0x0f 0x73 11/2. */
3472FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3473
3474/** Opcode 0x66 0x0f 0x73 11/2. */
3475FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
3476
3477/** Opcode 0x66 0x0f 0x73 11/3. */
3478FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3479
3480/** Opcode 0x0f 0x73 11/6. */
3481FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3482
3483/** Opcode 0x66 0x0f 0x73 11/6. */
3484FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3485
3486/** Opcode 0x66 0x0f 0x73 11/7. */
3487FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3488
3489
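/* Unlike groups 12 and 13, group 14 also decodes reg=3 (psrldq) and
 * reg=7 (pslldq); these byte-granular shifts exist only in the 0x66
 * (SSE2) form, so their no-prefix MMX cases stay invalid below. */
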
3490/** Opcode 0x0f 0x73. */
3491FNIEMOP_DEF(iemOp_Grp14)
3492{
3493 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3494 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3495 return IEMOP_RAISE_INVALID_OPCODE();
3496 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3497 {
3498 case 0: case 1: case 4: case 5:
3499 return IEMOP_RAISE_INVALID_OPCODE();
3500 case 2:
3501 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3502 {
3503 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3504 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3505 default: return IEMOP_RAISE_INVALID_OPCODE();
3506 }
3507 case 3:
3508 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3509 {
3510 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3511 default: return IEMOP_RAISE_INVALID_OPCODE();
3512 }
3513 case 6:
3514 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3515 {
3516 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3517 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3518 default: return IEMOP_RAISE_INVALID_OPCODE();
3519 }
3520 case 7:
3521 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3522 {
3523 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3524 default: return IEMOP_RAISE_INVALID_OPCODE();
3525 }
3526 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3527 }
3528}
3529
3530
3531/**
3532 * Common worker for SSE2 and MMX instructions on the forms:
3533 * pxxx mm1, mm2/mem64
3534 * pxxx xmm1, xmm2/mem128
3535 *
3536 * Proper alignment of the 128-bit operand is enforced.
3537 * Exceptions type 4. SSE2 and MMX cpuid checks.
3538 */
3539FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3540{
3541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3542 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3543 {
3544 case IEM_OP_PRF_SIZE_OP: /* SSE */
3545 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3546 {
3547 /*
3548 * Register, register.
3549 */
3550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3551 IEM_MC_BEGIN(2, 0);
3552 IEM_MC_ARG(uint128_t *, pDst, 0);
3553 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3554 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3555 IEM_MC_PREPARE_SSE_USAGE();
3556 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3557 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3558 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3559 IEM_MC_ADVANCE_RIP();
3560 IEM_MC_END();
3561 }
3562 else
3563 {
3564 /*
3565 * Register, memory.
3566 */
3567 IEM_MC_BEGIN(2, 2);
3568 IEM_MC_ARG(uint128_t *, pDst, 0);
3569 IEM_MC_LOCAL(uint128_t, uSrc);
3570 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3571 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3572
3573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3575 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3576 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3577
3578 IEM_MC_PREPARE_SSE_USAGE();
3579 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3580 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3581
3582 IEM_MC_ADVANCE_RIP();
3583 IEM_MC_END();
3584 }
3585 return VINF_SUCCESS;
3586
3587 case 0: /* MMX */
3588 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3589 {
3590 /*
3591 * Register, register.
3592 */
3593 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3594 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3596 IEM_MC_BEGIN(2, 0);
3597 IEM_MC_ARG(uint64_t *, pDst, 0);
3598 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3599 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3600 IEM_MC_PREPARE_FPU_USAGE();
3601 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3602 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3603 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3604 IEM_MC_ADVANCE_RIP();
3605 IEM_MC_END();
3606 }
3607 else
3608 {
3609 /*
3610 * Register, memory.
3611 */
3612 IEM_MC_BEGIN(2, 2);
3613 IEM_MC_ARG(uint64_t *, pDst, 0);
3614 IEM_MC_LOCAL(uint64_t, uSrc);
3615 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3617
3618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3620 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3621 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3622
3623 IEM_MC_PREPARE_FPU_USAGE();
3624 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3625 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3626
3627 IEM_MC_ADVANCE_RIP();
3628 IEM_MC_END();
3629 }
3630 return VINF_SUCCESS;
3631
3632 default:
3633 return IEMOP_RAISE_INVALID_OPCODE();
3634 }
3635}
3636
3637
3638/** Opcode 0x0f 0x74. */
3639FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
3640{
3641 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
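    /* Illustrative semantics (not from the original source): for each byte
     * lane i, dst[i] = (dst[i] == src[i]) ? 0xff : 0x00; the MMX form
     * covers 8 lanes, the SSE2 form 16. */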
3642 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3643}
3644
3645
3646/** Opcode 0x0f 0x75. */
3647FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
3648{
3649 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3650 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3651}
3652
3653
3654/** Opcode 0x0f 0x76. */
3655FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq__pcmpeqd_Vdq_Wdq)
3656{
3657 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3658 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3659}
3660
3661
3662/** Opcode 0x0f 0x77 - emms / vzeroupper / vzeroall */
3663FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3664/* Opcode 0x66 0x0f 0x77 - invalid */
3665/* Opcode 0xf3 0x0f 0x77 - invalid */
3666/* Opcode 0xf2 0x0f 0x77 - invalid */
3667
3668/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3669FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3670/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3671FNIEMOP_STUB(iemOp_AmdGrp17);
3672/* Opcode 0xf3 0x0f 0x78 - invalid */
3673/* Opcode 0xf2 0x0f 0x78 - invalid */
3674
3675/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3676FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3677/* Opcode 0x66 0x0f 0x79 - invalid */
3678/* Opcode 0xf3 0x0f 0x79 - invalid */
3679/* Opcode 0xf2 0x0f 0x79 - invalid */
3680
3681/* Opcode 0x0f 0x7a - invalid */
3682/* Opcode 0x66 0x0f 0x7a - invalid */
3683/* Opcode 0xf3 0x0f 0x7a - invalid */
3684/* Opcode 0xf2 0x0f 0x7a - invalid */
3685
3686/* Opcode 0x0f 0x7b - invalid */
3687/* Opcode 0x66 0x0f 0x7b - invalid */
3688/* Opcode 0xf3 0x0f 0x7b - invalid */
3689/* Opcode 0xf2 0x0f 0x7b - invalid */
3690
3691/* Opcode 0x0f 0x7c - invalid */
3692/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3693FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3694/* Opcode 0xf3 0x0f 0x7c - invalid */
3695/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3696FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3697
3698/* Opcode 0x0f 0x7d - invalid */
3699/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3700FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3701/* Opcode 0xf3 0x0f 0x7d - invalid */
3702/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3703FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3704
3705
3706/** Opcode 0x0f 0x7e. */
3707FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
3708{
3709 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3710 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3711 {
3712 case IEM_OP_PRF_SIZE_OP: /* SSE */
3713 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3714 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
3715 else
3716 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
3717 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3718 {
3719 /* greg, XMM */
3720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3721 IEM_MC_BEGIN(0, 1);
3722 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3723 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3724 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3725 {
3726 IEM_MC_LOCAL(uint64_t, u64Tmp);
3727 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3728 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3729 }
3730 else
3731 {
3732 IEM_MC_LOCAL(uint32_t, u32Tmp);
3733 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3734 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3735 }
3736 IEM_MC_ADVANCE_RIP();
3737 IEM_MC_END();
3738 }
3739 else
3740 {
3741 /* [mem], XMM */
3742 IEM_MC_BEGIN(0, 2);
3743 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3744 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3747 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3748 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3749 {
3750 IEM_MC_LOCAL(uint64_t, u64Tmp);
3751 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3752 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3753 }
3754 else
3755 {
3756 IEM_MC_LOCAL(uint32_t, u32Tmp);
3757 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3758 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3759 }
3760 IEM_MC_ADVANCE_RIP();
3761 IEM_MC_END();
3762 }
3763 return VINF_SUCCESS;
3764
3765 case 0: /* MMX */
3766 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3767 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3768 else
3769 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3770 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3771 {
3772 /* greg, MMX */
3773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3774 IEM_MC_BEGIN(0, 1);
3775 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3776 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3777 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3778 {
3779 IEM_MC_LOCAL(uint64_t, u64Tmp);
3780 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3781 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3782 }
3783 else
3784 {
3785 IEM_MC_LOCAL(uint32_t, u32Tmp);
3786 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3787 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3788 }
3789 IEM_MC_ADVANCE_RIP();
3790 IEM_MC_END();
3791 }
3792 else
3793 {
3794 /* [mem], MMX */
3795 IEM_MC_BEGIN(0, 2);
3796 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3797 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3798 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3800 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3801 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3802 {
3803 IEM_MC_LOCAL(uint64_t, u64Tmp);
3804 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3805 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3806 }
3807 else
3808 {
3809 IEM_MC_LOCAL(uint32_t, u32Tmp);
3810 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3811 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3812 }
3813 IEM_MC_ADVANCE_RIP();
3814 IEM_MC_END();
3815 }
3816 return VINF_SUCCESS;
3817
3818 default:
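            /* Note: the F3-prefixed form (movq Vq,Wq) named in this
             * function is not implemented here and takes this
             * invalid-opcode path. */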
3819 return IEMOP_RAISE_INVALID_OPCODE();
3820 }
3821}
3822
3823
3824/** Opcode 0x0f 0x7f. */
3825FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
3826{
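    /* 0x66 selects movdqa (alignment-checked 128-bit access) and 0xf3
     * selects movdqu (unaligned allowed); the unprefixed form is the MMX
     * 'movq Qq,Pq' handled further down. */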
3827 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3828 bool fAligned = false;
3829 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3830 {
3831 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
3832 fAligned = true;
3833 /* fall thru */
3834 case IEM_OP_PRF_REPZ: /* SSE unaligned */
3835 if (fAligned)
3836 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
3837 else
3838 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
3839 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3840 {
3841 /*
3842 * Register, register.
3843 */
3844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3845 IEM_MC_BEGIN(0, 0);
3846 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3847 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3848 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3849 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3850 IEM_MC_ADVANCE_RIP();
3851 IEM_MC_END();
3852 }
3853 else
3854 {
3855 /*
3856 * Register, memory.
3857 */
3858 IEM_MC_BEGIN(0, 2);
3859 IEM_MC_LOCAL(uint128_t, u128Tmp);
3860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3861
3862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3864 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3865 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3866
3867 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3868 if (fAligned)
3869 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3870 else
3871 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3872
3873 IEM_MC_ADVANCE_RIP();
3874 IEM_MC_END();
3875 }
3876 return VINF_SUCCESS;
3877
3878 case 0: /* MMX */
3879 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3880
3881 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3882 {
3883 /*
3884 * Register, register.
3885 */
3886 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3887 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3889 IEM_MC_BEGIN(0, 1);
3890 IEM_MC_LOCAL(uint64_t, u64Tmp);
3891 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3892 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3893 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3894 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3895 IEM_MC_ADVANCE_RIP();
3896 IEM_MC_END();
3897 }
3898 else
3899 {
3900 /*
3901 * Register, memory.
3902 */
3903 IEM_MC_BEGIN(0, 2);
3904 IEM_MC_LOCAL(uint64_t, u64Tmp);
3905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3906
3907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3909 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3910 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3911
3912 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3913 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3914
3915 IEM_MC_ADVANCE_RIP();
3916 IEM_MC_END();
3917 }
3918 return VINF_SUCCESS;
3919
3920 default:
3921 return IEMOP_RAISE_INVALID_OPCODE();
3922 }
3923}
3924
3925
3926
3927/** Opcode 0x0f 0x80. */
3928FNIEMOP_DEF(iemOp_jo_Jv)
3929{
3930 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3931 IEMOP_HLP_MIN_386();
3932 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3933 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3934 {
3935 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3937
3938 IEM_MC_BEGIN(0, 0);
3939 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3940 IEM_MC_REL_JMP_S16(i16Imm);
3941 } IEM_MC_ELSE() {
3942 IEM_MC_ADVANCE_RIP();
3943 } IEM_MC_ENDIF();
3944 IEM_MC_END();
3945 }
3946 else
3947 {
3948 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3950
3951 IEM_MC_BEGIN(0, 0);
3952 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3953 IEM_MC_REL_JMP_S32(i32Imm);
3954 } IEM_MC_ELSE() {
3955 IEM_MC_ADVANCE_RIP();
3956 } IEM_MC_ENDIF();
3957 IEM_MC_END();
3958 }
3959 return VINF_SUCCESS;
3960}
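
/* Branch sketch (illustrative, not from the original source): for
 * '0F 80 02 00 00 00' (jo +2) the signed displacement is applied relative
 * to the end of the instruction when EFLAGS.OF is set; otherwise RIP just
 * advances.  Opcodes 0x81..0x8f below repeat this template with only the
 * EFLAGS test swapped. */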
3961
3962
3963/** Opcode 0x0f 0x81. */
3964FNIEMOP_DEF(iemOp_jno_Jv)
3965{
3966 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3967 IEMOP_HLP_MIN_386();
3968 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3969 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3970 {
3971 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3973
3974 IEM_MC_BEGIN(0, 0);
3975 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3976 IEM_MC_ADVANCE_RIP();
3977 } IEM_MC_ELSE() {
3978 IEM_MC_REL_JMP_S16(i16Imm);
3979 } IEM_MC_ENDIF();
3980 IEM_MC_END();
3981 }
3982 else
3983 {
3984 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3986
3987 IEM_MC_BEGIN(0, 0);
3988 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3989 IEM_MC_ADVANCE_RIP();
3990 } IEM_MC_ELSE() {
3991 IEM_MC_REL_JMP_S32(i32Imm);
3992 } IEM_MC_ENDIF();
3993 IEM_MC_END();
3994 }
3995 return VINF_SUCCESS;
3996}
3997
3998
3999/** Opcode 0x0f 0x82. */
4000FNIEMOP_DEF(iemOp_jc_Jv)
4001{
4002 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4003 IEMOP_HLP_MIN_386();
4004 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4005 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4006 {
4007 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4009
4010 IEM_MC_BEGIN(0, 0);
4011 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4012 IEM_MC_REL_JMP_S16(i16Imm);
4013 } IEM_MC_ELSE() {
4014 IEM_MC_ADVANCE_RIP();
4015 } IEM_MC_ENDIF();
4016 IEM_MC_END();
4017 }
4018 else
4019 {
4020 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4022
4023 IEM_MC_BEGIN(0, 0);
4024 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4025 IEM_MC_REL_JMP_S32(i32Imm);
4026 } IEM_MC_ELSE() {
4027 IEM_MC_ADVANCE_RIP();
4028 } IEM_MC_ENDIF();
4029 IEM_MC_END();
4030 }
4031 return VINF_SUCCESS;
4032}
4033
4034
4035/** Opcode 0x0f 0x83. */
4036FNIEMOP_DEF(iemOp_jnc_Jv)
4037{
4038 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4039 IEMOP_HLP_MIN_386();
4040 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4041 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4042 {
4043 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4045
4046 IEM_MC_BEGIN(0, 0);
4047 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4048 IEM_MC_ADVANCE_RIP();
4049 } IEM_MC_ELSE() {
4050 IEM_MC_REL_JMP_S16(i16Imm);
4051 } IEM_MC_ENDIF();
4052 IEM_MC_END();
4053 }
4054 else
4055 {
4056 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4058
4059 IEM_MC_BEGIN(0, 0);
4060 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4061 IEM_MC_ADVANCE_RIP();
4062 } IEM_MC_ELSE() {
4063 IEM_MC_REL_JMP_S32(i32Imm);
4064 } IEM_MC_ENDIF();
4065 IEM_MC_END();
4066 }
4067 return VINF_SUCCESS;
4068}
4069
4070
4071/** Opcode 0x0f 0x84. */
4072FNIEMOP_DEF(iemOp_je_Jv)
4073{
4074 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4075 IEMOP_HLP_MIN_386();
4076 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4077 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4078 {
4079 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4081
4082 IEM_MC_BEGIN(0, 0);
4083 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4084 IEM_MC_REL_JMP_S16(i16Imm);
4085 } IEM_MC_ELSE() {
4086 IEM_MC_ADVANCE_RIP();
4087 } IEM_MC_ENDIF();
4088 IEM_MC_END();
4089 }
4090 else
4091 {
4092 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4094
4095 IEM_MC_BEGIN(0, 0);
4096 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4097 IEM_MC_REL_JMP_S32(i32Imm);
4098 } IEM_MC_ELSE() {
4099 IEM_MC_ADVANCE_RIP();
4100 } IEM_MC_ENDIF();
4101 IEM_MC_END();
4102 }
4103 return VINF_SUCCESS;
4104}
4105
4106
4107/** Opcode 0x0f 0x85. */
4108FNIEMOP_DEF(iemOp_jne_Jv)
4109{
4110 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4111 IEMOP_HLP_MIN_386();
4112 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4113 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4114 {
4115 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4117
4118 IEM_MC_BEGIN(0, 0);
4119 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4120 IEM_MC_ADVANCE_RIP();
4121 } IEM_MC_ELSE() {
4122 IEM_MC_REL_JMP_S16(i16Imm);
4123 } IEM_MC_ENDIF();
4124 IEM_MC_END();
4125 }
4126 else
4127 {
4128 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4130
4131 IEM_MC_BEGIN(0, 0);
4132 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4133 IEM_MC_ADVANCE_RIP();
4134 } IEM_MC_ELSE() {
4135 IEM_MC_REL_JMP_S32(i32Imm);
4136 } IEM_MC_ENDIF();
4137 IEM_MC_END();
4138 }
4139 return VINF_SUCCESS;
4140}
4141
4142
4143/** Opcode 0x0f 0x86. */
4144FNIEMOP_DEF(iemOp_jbe_Jv)
4145{
4146 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4147 IEMOP_HLP_MIN_386();
4148 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4149 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4150 {
4151 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4153
4154 IEM_MC_BEGIN(0, 0);
4155 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4156 IEM_MC_REL_JMP_S16(i16Imm);
4157 } IEM_MC_ELSE() {
4158 IEM_MC_ADVANCE_RIP();
4159 } IEM_MC_ENDIF();
4160 IEM_MC_END();
4161 }
4162 else
4163 {
4164 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4166
4167 IEM_MC_BEGIN(0, 0);
4168 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4169 IEM_MC_REL_JMP_S32(i32Imm);
4170 } IEM_MC_ELSE() {
4171 IEM_MC_ADVANCE_RIP();
4172 } IEM_MC_ENDIF();
4173 IEM_MC_END();
4174 }
4175 return VINF_SUCCESS;
4176}
4177
4178
4179/** Opcode 0x0f 0x87. */
4180FNIEMOP_DEF(iemOp_jnbe_Jv)
4181{
4182 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4183 IEMOP_HLP_MIN_386();
4184 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4185 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4186 {
4187 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4189
4190 IEM_MC_BEGIN(0, 0);
4191 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4192 IEM_MC_ADVANCE_RIP();
4193 } IEM_MC_ELSE() {
4194 IEM_MC_REL_JMP_S16(i16Imm);
4195 } IEM_MC_ENDIF();
4196 IEM_MC_END();
4197 }
4198 else
4199 {
4200 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4202
4203 IEM_MC_BEGIN(0, 0);
4204 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4205 IEM_MC_ADVANCE_RIP();
4206 } IEM_MC_ELSE() {
4207 IEM_MC_REL_JMP_S32(i32Imm);
4208 } IEM_MC_ENDIF();
4209 IEM_MC_END();
4210 }
4211 return VINF_SUCCESS;
4212}
4213
4214
4215/** Opcode 0x0f 0x88. */
4216FNIEMOP_DEF(iemOp_js_Jv)
4217{
4218 IEMOP_MNEMONIC(js_Jv, "js Jv");
4219 IEMOP_HLP_MIN_386();
4220 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4221 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4222 {
4223 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4225
4226 IEM_MC_BEGIN(0, 0);
4227 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4228 IEM_MC_REL_JMP_S16(i16Imm);
4229 } IEM_MC_ELSE() {
4230 IEM_MC_ADVANCE_RIP();
4231 } IEM_MC_ENDIF();
4232 IEM_MC_END();
4233 }
4234 else
4235 {
4236 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4238
4239 IEM_MC_BEGIN(0, 0);
4240 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4241 IEM_MC_REL_JMP_S32(i32Imm);
4242 } IEM_MC_ELSE() {
4243 IEM_MC_ADVANCE_RIP();
4244 } IEM_MC_ENDIF();
4245 IEM_MC_END();
4246 }
4247 return VINF_SUCCESS;
4248}
4249
4250
4251/** Opcode 0x0f 0x89. */
4252FNIEMOP_DEF(iemOp_jns_Jv)
4253{
4254 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4255 IEMOP_HLP_MIN_386();
4256 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4257 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4258 {
4259 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4261
4262 IEM_MC_BEGIN(0, 0);
4263 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4264 IEM_MC_ADVANCE_RIP();
4265 } IEM_MC_ELSE() {
4266 IEM_MC_REL_JMP_S16(i16Imm);
4267 } IEM_MC_ENDIF();
4268 IEM_MC_END();
4269 }
4270 else
4271 {
4272 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4274
4275 IEM_MC_BEGIN(0, 0);
4276 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4277 IEM_MC_ADVANCE_RIP();
4278 } IEM_MC_ELSE() {
4279 IEM_MC_REL_JMP_S32(i32Imm);
4280 } IEM_MC_ENDIF();
4281 IEM_MC_END();
4282 }
4283 return VINF_SUCCESS;
4284}
4285
4286
4287/** Opcode 0x0f 0x8a. */
4288FNIEMOP_DEF(iemOp_jp_Jv)
4289{
4290 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4291 IEMOP_HLP_MIN_386();
4292 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4293 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4294 {
4295 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4297
4298 IEM_MC_BEGIN(0, 0);
4299 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4300 IEM_MC_REL_JMP_S16(i16Imm);
4301 } IEM_MC_ELSE() {
4302 IEM_MC_ADVANCE_RIP();
4303 } IEM_MC_ENDIF();
4304 IEM_MC_END();
4305 }
4306 else
4307 {
4308 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4310
4311 IEM_MC_BEGIN(0, 0);
4312 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4313 IEM_MC_REL_JMP_S32(i32Imm);
4314 } IEM_MC_ELSE() {
4315 IEM_MC_ADVANCE_RIP();
4316 } IEM_MC_ENDIF();
4317 IEM_MC_END();
4318 }
4319 return VINF_SUCCESS;
4320}
4321
4322
4323/** Opcode 0x0f 0x8b. */
4324FNIEMOP_DEF(iemOp_jnp_Jv)
4325{
4326 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4327 IEMOP_HLP_MIN_386();
4328 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4329 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4330 {
4331 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4333
4334 IEM_MC_BEGIN(0, 0);
4335 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4336 IEM_MC_ADVANCE_RIP();
4337 } IEM_MC_ELSE() {
4338 IEM_MC_REL_JMP_S16(i16Imm);
4339 } IEM_MC_ENDIF();
4340 IEM_MC_END();
4341 }
4342 else
4343 {
4344 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4346
4347 IEM_MC_BEGIN(0, 0);
4348 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4349 IEM_MC_ADVANCE_RIP();
4350 } IEM_MC_ELSE() {
4351 IEM_MC_REL_JMP_S32(i32Imm);
4352 } IEM_MC_ENDIF();
4353 IEM_MC_END();
4354 }
4355 return VINF_SUCCESS;
4356}
4357
4358
4359/** Opcode 0x0f 0x8c. */
4360FNIEMOP_DEF(iemOp_jl_Jv)
4361{
4362 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4363 IEMOP_HLP_MIN_386();
4364 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4365 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4366 {
4367 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4369
4370 IEM_MC_BEGIN(0, 0);
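        /* Signed 'less than' is taken when SF != OF. */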
4371 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4372 IEM_MC_REL_JMP_S16(i16Imm);
4373 } IEM_MC_ELSE() {
4374 IEM_MC_ADVANCE_RIP();
4375 } IEM_MC_ENDIF();
4376 IEM_MC_END();
4377 }
4378 else
4379 {
4380 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4382
4383 IEM_MC_BEGIN(0, 0);
4384 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4385 IEM_MC_REL_JMP_S32(i32Imm);
4386 } IEM_MC_ELSE() {
4387 IEM_MC_ADVANCE_RIP();
4388 } IEM_MC_ENDIF();
4389 IEM_MC_END();
4390 }
4391 return VINF_SUCCESS;
4392}
4393
4394
4395/** Opcode 0x0f 0x8d. */
4396FNIEMOP_DEF(iemOp_jnl_Jv)
4397{
4398 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4399 IEMOP_HLP_MIN_386();
4400 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4401 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4402 {
4403 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4405
4406 IEM_MC_BEGIN(0, 0);
4407 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4408 IEM_MC_ADVANCE_RIP();
4409 } IEM_MC_ELSE() {
4410 IEM_MC_REL_JMP_S16(i16Imm);
4411 } IEM_MC_ENDIF();
4412 IEM_MC_END();
4413 }
4414 else
4415 {
4416 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4418
4419 IEM_MC_BEGIN(0, 0);
4420 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4421 IEM_MC_ADVANCE_RIP();
4422 } IEM_MC_ELSE() {
4423 IEM_MC_REL_JMP_S32(i32Imm);
4424 } IEM_MC_ENDIF();
4425 IEM_MC_END();
4426 }
4427 return VINF_SUCCESS;
4428}
4429
4430
4431/** Opcode 0x0f 0x8e. */
4432FNIEMOP_DEF(iemOp_jle_Jv)
4433{
4434 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4435 IEMOP_HLP_MIN_386();
4436 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4437 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4438 {
4439 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4441
4442 IEM_MC_BEGIN(0, 0);
4443 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4444 IEM_MC_REL_JMP_S16(i16Imm);
4445 } IEM_MC_ELSE() {
4446 IEM_MC_ADVANCE_RIP();
4447 } IEM_MC_ENDIF();
4448 IEM_MC_END();
4449 }
4450 else
4451 {
4452 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4454
4455 IEM_MC_BEGIN(0, 0);
4456 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4457 IEM_MC_REL_JMP_S32(i32Imm);
4458 } IEM_MC_ELSE() {
4459 IEM_MC_ADVANCE_RIP();
4460 } IEM_MC_ENDIF();
4461 IEM_MC_END();
4462 }
4463 return VINF_SUCCESS;
4464}
4465
4466
4467/** Opcode 0x0f 0x8f. */
4468FNIEMOP_DEF(iemOp_jnle_Jv)
4469{
4470 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4471 IEMOP_HLP_MIN_386();
4472 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4473 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4474 {
4475 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4477
4478 IEM_MC_BEGIN(0, 0);
4479 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4480 IEM_MC_ADVANCE_RIP();
4481 } IEM_MC_ELSE() {
4482 IEM_MC_REL_JMP_S16(i16Imm);
4483 } IEM_MC_ENDIF();
4484 IEM_MC_END();
4485 }
4486 else
4487 {
4488 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4490
4491 IEM_MC_BEGIN(0, 0);
4492 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4493 IEM_MC_ADVANCE_RIP();
4494 } IEM_MC_ELSE() {
4495 IEM_MC_REL_JMP_S32(i32Imm);
4496 } IEM_MC_ENDIF();
4497 IEM_MC_END();
4498 }
4499 return VINF_SUCCESS;
4500}
4501
4502
4503/** Opcode 0x0f 0x90. */
4504FNIEMOP_DEF(iemOp_seto_Eb)
4505{
4506 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4507 IEMOP_HLP_MIN_386();
4508 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4509
4510 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4511 * any way. AMD says it's "unused", whatever that means. We're
4512 * ignoring for now. */
4513 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4514 {
4515 /* register target */
4516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4517 IEM_MC_BEGIN(0, 0);
4518 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4519 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4520 } IEM_MC_ELSE() {
4521 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4522 } IEM_MC_ENDIF();
4523 IEM_MC_ADVANCE_RIP();
4524 IEM_MC_END();
4525 }
4526 else
4527 {
4528 /* memory target */
4529 IEM_MC_BEGIN(0, 1);
4530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4533 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4534 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4535 } IEM_MC_ELSE() {
4536 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4537 } IEM_MC_ENDIF();
4538 IEM_MC_ADVANCE_RIP();
4539 IEM_MC_END();
4540 }
4541 return VINF_SUCCESS;
4542}
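
/* Opcodes 0x91..0x9f below follow the same template: store 1 in the byte
 * destination when the condition holds, otherwise store 0; as noted in
 * the todos, the ModR/M reg field is ignored. */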
4543
4544
4545/** Opcode 0x0f 0x91. */
4546FNIEMOP_DEF(iemOp_setno_Eb)
4547{
4548 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4549 IEMOP_HLP_MIN_386();
4550 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4551
4552 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4553 * any way. AMD says it's "unused", whatever that means. We're
4554 * ignoring for now. */
4555 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4556 {
4557 /* register target */
4558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4559 IEM_MC_BEGIN(0, 0);
4560 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4561 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4562 } IEM_MC_ELSE() {
4563 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4564 } IEM_MC_ENDIF();
4565 IEM_MC_ADVANCE_RIP();
4566 IEM_MC_END();
4567 }
4568 else
4569 {
4570 /* memory target */
4571 IEM_MC_BEGIN(0, 1);
4572 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4575 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4576 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4577 } IEM_MC_ELSE() {
4578 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4579 } IEM_MC_ENDIF();
4580 IEM_MC_ADVANCE_RIP();
4581 IEM_MC_END();
4582 }
4583 return VINF_SUCCESS;
4584}
4585
4586
4587/** Opcode 0x0f 0x92. */
4588FNIEMOP_DEF(iemOp_setc_Eb)
4589{
4590 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4591 IEMOP_HLP_MIN_386();
4592 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4593
4594 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4595 * any way. AMD says it's "unused", whatever that means. We're
4596 * ignoring for now. */
4597 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4598 {
4599 /* register target */
4600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4601 IEM_MC_BEGIN(0, 0);
4602 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4603 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4604 } IEM_MC_ELSE() {
4605 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4606 } IEM_MC_ENDIF();
4607 IEM_MC_ADVANCE_RIP();
4608 IEM_MC_END();
4609 }
4610 else
4611 {
4612 /* memory target */
4613 IEM_MC_BEGIN(0, 1);
4614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4615 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4617 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4618 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4619 } IEM_MC_ELSE() {
4620 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4621 } IEM_MC_ENDIF();
4622 IEM_MC_ADVANCE_RIP();
4623 IEM_MC_END();
4624 }
4625 return VINF_SUCCESS;
4626}
4627
4628
4629/** Opcode 0x0f 0x93. */
4630FNIEMOP_DEF(iemOp_setnc_Eb)
4631{
4632 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4633 IEMOP_HLP_MIN_386();
4634 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4635
4636 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4637 * any way. AMD says it's "unused", whatever that means. We're
4638 * ignoring for now. */
4639 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4640 {
4641 /* register target */
4642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4643 IEM_MC_BEGIN(0, 0);
4644 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4645 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4646 } IEM_MC_ELSE() {
4647 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4648 } IEM_MC_ENDIF();
4649 IEM_MC_ADVANCE_RIP();
4650 IEM_MC_END();
4651 }
4652 else
4653 {
4654 /* memory target */
4655 IEM_MC_BEGIN(0, 1);
4656 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4657 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4659 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4660 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4661 } IEM_MC_ELSE() {
4662 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4663 } IEM_MC_ENDIF();
4664 IEM_MC_ADVANCE_RIP();
4665 IEM_MC_END();
4666 }
4667 return VINF_SUCCESS;
4668}
4669
4670
4671/** Opcode 0x0f 0x94. */
4672FNIEMOP_DEF(iemOp_sete_Eb)
4673{
4674 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4675 IEMOP_HLP_MIN_386();
4676 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4677
4678 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4679 * any way. AMD says it's "unused", whatever that means. We're
4680 * ignoring for now. */
4681 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4682 {
4683 /* register target */
4684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4685 IEM_MC_BEGIN(0, 0);
4686 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4687 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4688 } IEM_MC_ELSE() {
4689 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4690 } IEM_MC_ENDIF();
4691 IEM_MC_ADVANCE_RIP();
4692 IEM_MC_END();
4693 }
4694 else
4695 {
4696 /* memory target */
4697 IEM_MC_BEGIN(0, 1);
4698 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4699 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4701 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4702 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4703 } IEM_MC_ELSE() {
4704 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4705 } IEM_MC_ENDIF();
4706 IEM_MC_ADVANCE_RIP();
4707 IEM_MC_END();
4708 }
4709 return VINF_SUCCESS;
4710}
4711
4712
4713/** Opcode 0x0f 0x95. */
4714FNIEMOP_DEF(iemOp_setne_Eb)
4715{
4716 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4717 IEMOP_HLP_MIN_386();
4718 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4719
4720 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4721 * any way. AMD says it's "unused", whatever that means. We're
4722 * ignoring for now. */
4723 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4724 {
4725 /* register target */
4726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4727 IEM_MC_BEGIN(0, 0);
4728 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4729 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4730 } IEM_MC_ELSE() {
4731 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4732 } IEM_MC_ENDIF();
4733 IEM_MC_ADVANCE_RIP();
4734 IEM_MC_END();
4735 }
4736 else
4737 {
4738 /* memory target */
4739 IEM_MC_BEGIN(0, 1);
4740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4743 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4744 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4745 } IEM_MC_ELSE() {
4746 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4747 } IEM_MC_ENDIF();
4748 IEM_MC_ADVANCE_RIP();
4749 IEM_MC_END();
4750 }
4751 return VINF_SUCCESS;
4752}
4753
4754
4755/** Opcode 0x0f 0x96. */
4756FNIEMOP_DEF(iemOp_setbe_Eb)
4757{
4758 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4759 IEMOP_HLP_MIN_386();
4760 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4761
4762 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4763 * any way. AMD says it's "unused", whatever that means. We're
4764 * ignoring for now. */
4765 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4766 {
4767 /* register target */
4768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4769 IEM_MC_BEGIN(0, 0);
4770 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4771 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4772 } IEM_MC_ELSE() {
4773 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4774 } IEM_MC_ENDIF();
4775 IEM_MC_ADVANCE_RIP();
4776 IEM_MC_END();
4777 }
4778 else
4779 {
4780 /* memory target */
4781 IEM_MC_BEGIN(0, 1);
4782 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4783 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4784 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4785 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4786 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4787 } IEM_MC_ELSE() {
4788 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4789 } IEM_MC_ENDIF();
4790 IEM_MC_ADVANCE_RIP();
4791 IEM_MC_END();
4792 }
4793 return VINF_SUCCESS;
4794}
4795
4796
4797/** Opcode 0x0f 0x97. */
4798FNIEMOP_DEF(iemOp_setnbe_Eb)
4799{
4800 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4801 IEMOP_HLP_MIN_386();
4802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4803
4804 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4805 * any way. AMD says it's "unused", whatever that means. We're
4806 * ignoring for now. */
4807 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4808 {
4809 /* register target */
4810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4811 IEM_MC_BEGIN(0, 0);
4812 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4813 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4814 } IEM_MC_ELSE() {
4815 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4816 } IEM_MC_ENDIF();
4817 IEM_MC_ADVANCE_RIP();
4818 IEM_MC_END();
4819 }
4820 else
4821 {
4822 /* memory target */
4823 IEM_MC_BEGIN(0, 1);
4824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4827 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4828 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4829 } IEM_MC_ELSE() {
4830 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4831 } IEM_MC_ENDIF();
4832 IEM_MC_ADVANCE_RIP();
4833 IEM_MC_END();
4834 }
4835 return VINF_SUCCESS;
4836}
4837
4838
4839/** Opcode 0x0f 0x98. */
4840FNIEMOP_DEF(iemOp_sets_Eb)
4841{
4842 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4843 IEMOP_HLP_MIN_386();
4844 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4845
4846 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4847 * any way. AMD says it's "unused", whatever that means. We're
4848 * ignoring for now. */
4849 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4850 {
4851 /* register target */
4852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4853 IEM_MC_BEGIN(0, 0);
4854 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4855 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4856 } IEM_MC_ELSE() {
4857 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4858 } IEM_MC_ENDIF();
4859 IEM_MC_ADVANCE_RIP();
4860 IEM_MC_END();
4861 }
4862 else
4863 {
4864 /* memory target */
4865 IEM_MC_BEGIN(0, 1);
4866 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4867 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4869 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4870 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4871 } IEM_MC_ELSE() {
4872 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4873 } IEM_MC_ENDIF();
4874 IEM_MC_ADVANCE_RIP();
4875 IEM_MC_END();
4876 }
4877 return VINF_SUCCESS;
4878}
4879
4880
4881/** Opcode 0x0f 0x99. */
4882FNIEMOP_DEF(iemOp_setns_Eb)
4883{
4884 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4885 IEMOP_HLP_MIN_386();
4886 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4887
4888 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4889 * any way. AMD says it's "unused", whatever that means. We're
4890 * ignoring for now. */
4891 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4892 {
4893 /* register target */
4894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4895 IEM_MC_BEGIN(0, 0);
4896 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4897 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4898 } IEM_MC_ELSE() {
4899 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4900 } IEM_MC_ENDIF();
4901 IEM_MC_ADVANCE_RIP();
4902 IEM_MC_END();
4903 }
4904 else
4905 {
4906 /* memory target */
4907 IEM_MC_BEGIN(0, 1);
4908 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4911 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4912 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4913 } IEM_MC_ELSE() {
4914 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4915 } IEM_MC_ENDIF();
4916 IEM_MC_ADVANCE_RIP();
4917 IEM_MC_END();
4918 }
4919 return VINF_SUCCESS;
4920}
4921
4922
4923/** Opcode 0x0f 0x9a. */
4924FNIEMOP_DEF(iemOp_setp_Eb)
4925{
4926 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4927 IEMOP_HLP_MIN_386();
4928 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4929
4930 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4931 * any way. AMD says it's "unused", whatever that means. We're
4932 * ignoring for now. */
4933 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4934 {
4935 /* register target */
4936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4937 IEM_MC_BEGIN(0, 0);
4938 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4939 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4940 } IEM_MC_ELSE() {
4941 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4942 } IEM_MC_ENDIF();
4943 IEM_MC_ADVANCE_RIP();
4944 IEM_MC_END();
4945 }
4946 else
4947 {
4948 /* memory target */
4949 IEM_MC_BEGIN(0, 1);
4950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4953 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4954 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4955 } IEM_MC_ELSE() {
4956 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4957 } IEM_MC_ENDIF();
4958 IEM_MC_ADVANCE_RIP();
4959 IEM_MC_END();
4960 }
4961 return VINF_SUCCESS;
4962}
4963
4964
4965/** Opcode 0x0f 0x9b. */
4966FNIEMOP_DEF(iemOp_setnp_Eb)
4967{
4968 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4969 IEMOP_HLP_MIN_386();
4970 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4971
4972 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4973 * any way. AMD says it's "unused", whatever that means. We're
4974 * ignoring for now. */
4975 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4976 {
4977 /* register target */
4978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4979 IEM_MC_BEGIN(0, 0);
4980 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4981 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4982 } IEM_MC_ELSE() {
4983 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4984 } IEM_MC_ENDIF();
4985 IEM_MC_ADVANCE_RIP();
4986 IEM_MC_END();
4987 }
4988 else
4989 {
4990 /* memory target */
4991 IEM_MC_BEGIN(0, 1);
4992 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4993 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4995 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4996 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4997 } IEM_MC_ELSE() {
4998 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4999 } IEM_MC_ENDIF();
5000 IEM_MC_ADVANCE_RIP();
5001 IEM_MC_END();
5002 }
5003 return VINF_SUCCESS;
5004}
5005
5006
5007/** Opcode 0x0f 0x9c. */
5008FNIEMOP_DEF(iemOp_setl_Eb)
5009{
5010 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5011 IEMOP_HLP_MIN_386();
5012 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5013
5014 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5015 * any way. AMD says it's "unused", whatever that means. We're
5016 * ignoring for now. */
5017 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5018 {
5019 /* register target */
5020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5021 IEM_MC_BEGIN(0, 0);
5022 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5023 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5024 } IEM_MC_ELSE() {
5025 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5026 } IEM_MC_ENDIF();
5027 IEM_MC_ADVANCE_RIP();
5028 IEM_MC_END();
5029 }
5030 else
5031 {
5032 /* memory target */
5033 IEM_MC_BEGIN(0, 1);
5034 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5035 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5037 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5038 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5039 } IEM_MC_ELSE() {
5040 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5041 } IEM_MC_ENDIF();
5042 IEM_MC_ADVANCE_RIP();
5043 IEM_MC_END();
5044 }
5045 return VINF_SUCCESS;
5046}
5047
5048
5049/** Opcode 0x0f 0x9d. */
5050FNIEMOP_DEF(iemOp_setnl_Eb)
5051{
5052 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5053 IEMOP_HLP_MIN_386();
5054 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5055
5056 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5057 * any way. AMD says it's "unused", whatever that means. We're
5058 * ignoring for now. */
5059 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5060 {
5061 /* register target */
5062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5063 IEM_MC_BEGIN(0, 0);
5064 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5065 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5066 } IEM_MC_ELSE() {
5067 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5068 } IEM_MC_ENDIF();
5069 IEM_MC_ADVANCE_RIP();
5070 IEM_MC_END();
5071 }
5072 else
5073 {
5074 /* memory target */
5075 IEM_MC_BEGIN(0, 1);
5076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5077 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5079 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5080 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5081 } IEM_MC_ELSE() {
5082 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5083 } IEM_MC_ENDIF();
5084 IEM_MC_ADVANCE_RIP();
5085 IEM_MC_END();
5086 }
5087 return VINF_SUCCESS;
5088}
5089
5090
5091/** Opcode 0x0f 0x9e. */
5092FNIEMOP_DEF(iemOp_setle_Eb)
5093{
5094 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5095 IEMOP_HLP_MIN_386();
5096 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5097
5098 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5099 * any way. AMD says it's "unused", whatever that means. We're
5100 * ignoring for now. */
5101 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5102 {
5103 /* register target */
5104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5105 IEM_MC_BEGIN(0, 0);
5106 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5107 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5108 } IEM_MC_ELSE() {
5109 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5110 } IEM_MC_ENDIF();
5111 IEM_MC_ADVANCE_RIP();
5112 IEM_MC_END();
5113 }
5114 else
5115 {
5116 /* memory target */
5117 IEM_MC_BEGIN(0, 1);
5118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5119 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5121 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5122 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5123 } IEM_MC_ELSE() {
5124 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5125 } IEM_MC_ENDIF();
5126 IEM_MC_ADVANCE_RIP();
5127 IEM_MC_END();
5128 }
5129 return VINF_SUCCESS;
5130}
5131
5132
5133/** Opcode 0x0f 0x9f. */
5134FNIEMOP_DEF(iemOp_setnle_Eb)
5135{
5136 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5137 IEMOP_HLP_MIN_386();
5138 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5139
5140 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5141 * any way. AMD says it's "unused", whatever that means. We're
5142 * ignoring for now. */
5143 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5144 {
5145 /* register target */
5146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5147 IEM_MC_BEGIN(0, 0);
5148 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5149 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5150 } IEM_MC_ELSE() {
5151 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5152 } IEM_MC_ENDIF();
5153 IEM_MC_ADVANCE_RIP();
5154 IEM_MC_END();
5155 }
5156 else
5157 {
5158 /* memory target */
5159 IEM_MC_BEGIN(0, 1);
5160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5163 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5164 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5165 } IEM_MC_ELSE() {
5166 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5167 } IEM_MC_ENDIF();
5168 IEM_MC_ADVANCE_RIP();
5169 IEM_MC_END();
5170 }
5171 return VINF_SUCCESS;
5172}
5173
5174
5175/**
5176 * Common 'push segment-register' helper.
5177 */
5178FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5179{
5180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5181 if (iReg < X86_SREG_FS)
5182 IEMOP_HLP_NO_64BIT();
5183 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5184
5185 switch (pVCpu->iem.s.enmEffOpSize)
5186 {
5187 case IEMMODE_16BIT:
5188 IEM_MC_BEGIN(0, 1);
5189 IEM_MC_LOCAL(uint16_t, u16Value);
5190 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5191 IEM_MC_PUSH_U16(u16Value);
5192 IEM_MC_ADVANCE_RIP();
5193 IEM_MC_END();
5194 break;
5195
5196 case IEMMODE_32BIT:
5197 IEM_MC_BEGIN(0, 1);
5198 IEM_MC_LOCAL(uint32_t, u32Value);
5199 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5200 IEM_MC_PUSH_U32_SREG(u32Value);
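            /* Assumption behind the dedicated SREG push: it models CPUs
             * that write only the low 16 bits of the 32-bit stack slot
             * when pushing a segment register, which a plain U32 push
             * would not capture. */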
5201 IEM_MC_ADVANCE_RIP();
5202 IEM_MC_END();
5203 break;
5204
5205 case IEMMODE_64BIT:
5206 IEM_MC_BEGIN(0, 1);
5207 IEM_MC_LOCAL(uint64_t, u64Value);
5208 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5209 IEM_MC_PUSH_U64(u64Value);
5210 IEM_MC_ADVANCE_RIP();
5211 IEM_MC_END();
5212 break;
5213 }
5214
5215 return VINF_SUCCESS;
5216}
5217
5218
5219/** Opcode 0x0f 0xa0. */
5220FNIEMOP_DEF(iemOp_push_fs)
5221{
5222 IEMOP_MNEMONIC(push_fs, "push fs");
5223 IEMOP_HLP_MIN_386();
5224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5225 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5226}
5227
5228
5229/** Opcode 0x0f 0xa1. */
5230FNIEMOP_DEF(iemOp_pop_fs)
5231{
5232 IEMOP_MNEMONIC(pop_fs, "pop fs");
5233 IEMOP_HLP_MIN_386();
5234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5235 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5236}
5237
5238
5239/** Opcode 0x0f 0xa2. */
5240FNIEMOP_DEF(iemOp_cpuid)
5241{
5242 IEMOP_MNEMONIC(cpuid, "cpuid");
5243 IEMOP_HLP_MIN_486(); /* not all 486s have CPUID. */
5244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5245 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5246}
5247
5248
5249/**
5250 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5251 * iemOp_bts_Ev_Gv.
5252 */
5253FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5254{
5255 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5256 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5257
5258 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5259 {
5260 /* register destination. */
5261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5262 switch (pVCpu->iem.s.enmEffOpSize)
5263 {
5264 case IEMMODE_16BIT:
5265 IEM_MC_BEGIN(3, 0);
5266 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5267 IEM_MC_ARG(uint16_t, u16Src, 1);
5268 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5269
5270 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5271 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
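                /* Register destinations wrap the bit offset modulo the
                 * operand width; here only the low 4 bits of a 16-bit
                 * operand are kept. */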
5272 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5273 IEM_MC_REF_EFLAGS(pEFlags);
5274 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5275
5276 IEM_MC_ADVANCE_RIP();
5277 IEM_MC_END();
5278 return VINF_SUCCESS;
5279
5280 case IEMMODE_32BIT:
5281 IEM_MC_BEGIN(3, 0);
5282 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5283 IEM_MC_ARG(uint32_t, u32Src, 1);
5284 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5285
5286 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5287 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5288 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5289 IEM_MC_REF_EFLAGS(pEFlags);
5290 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5291
5292 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5293 IEM_MC_ADVANCE_RIP();
5294 IEM_MC_END();
5295 return VINF_SUCCESS;
5296
5297 case IEMMODE_64BIT:
5298 IEM_MC_BEGIN(3, 0);
5299 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5300 IEM_MC_ARG(uint64_t, u64Src, 1);
5301 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5302
5303 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5304 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5305 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5306 IEM_MC_REF_EFLAGS(pEFlags);
5307 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5308
5309 IEM_MC_ADVANCE_RIP();
5310 IEM_MC_END();
5311 return VINF_SUCCESS;
5312
5313 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5314 }
5315 }
5316 else
5317 {
5318 /* memory destination. */
5319
5320 uint32_t fAccess;
5321 if (pImpl->pfnLockedU16)
5322 fAccess = IEM_ACCESS_DATA_RW;
5323 else /* BT */
5324 fAccess = IEM_ACCESS_DATA_R;
5325
5326 /** @todo test negative bit offsets! */
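 /* For memory destinations the bit offset is signed and may address bits
    outside the operand at the effective address: each case below splits
    it into an operand-sized displacement (arithmetic shift right by
    log2(width), scaled back to bytes) that is added to the effective
    address, plus a masked bit position within the addressed operand. */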
5327 switch (pVCpu->iem.s.enmEffOpSize)
5328 {
5329 case IEMMODE_16BIT:
5330 IEM_MC_BEGIN(3, 2);
5331 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5332 IEM_MC_ARG(uint16_t, u16Src, 1);
5333 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5334 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5335 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5336
5337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5338 if (pImpl->pfnLockedU16)
5339 IEMOP_HLP_DONE_DECODING();
5340 else
5341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5342 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5343 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5344 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5345 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5346 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5347 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5348 IEM_MC_FETCH_EFLAGS(EFlags);
5349
5350 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5351 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5352 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5353 else
5354 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5355 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5356
5357 IEM_MC_COMMIT_EFLAGS(EFlags);
5358 IEM_MC_ADVANCE_RIP();
5359 IEM_MC_END();
5360 return VINF_SUCCESS;
5361
5362 case IEMMODE_32BIT:
5363 IEM_MC_BEGIN(3, 2);
5364 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5365 IEM_MC_ARG(uint32_t, u32Src, 1);
5366 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5367 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5368 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5369
5370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5371 if (pImpl->pfnLockedU16)
5372 IEMOP_HLP_DONE_DECODING();
5373 else
5374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5375 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5376 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5377 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5378 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5379 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5380 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5381 IEM_MC_FETCH_EFLAGS(EFlags);
5382
5383 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5384 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5385 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5386 else
5387 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5388 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5389
5390 IEM_MC_COMMIT_EFLAGS(EFlags);
5391 IEM_MC_ADVANCE_RIP();
5392 IEM_MC_END();
5393 return VINF_SUCCESS;
5394
5395 case IEMMODE_64BIT:
5396 IEM_MC_BEGIN(3, 2);
5397 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5398 IEM_MC_ARG(uint64_t, u64Src, 1);
5399 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5400 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5401 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5402
5403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5404 if (pImpl->pfnLockedU16)
5405 IEMOP_HLP_DONE_DECODING();
5406 else
5407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5408 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5409 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5410 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5411 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5412 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5413 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5414 IEM_MC_FETCH_EFLAGS(EFlags);
5415
5416 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5417 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5418 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5419 else
5420 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5421 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5422
5423 IEM_MC_COMMIT_EFLAGS(EFlags);
5424 IEM_MC_ADVANCE_RIP();
5425 IEM_MC_END();
5426 return VINF_SUCCESS;
5427
5428 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5429 }
5430 }
5431}
5432
5433
5434/** Opcode 0x0f 0xa3. */
5435FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5436{
5437 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5438 IEMOP_HLP_MIN_386();
5439 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5440}
5441
5442
5443/**
5444 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5445 */
5446FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5447{
5448 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5449 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5450
5451 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5452 {
5453 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5455
5456 switch (pVCpu->iem.s.enmEffOpSize)
5457 {
5458 case IEMMODE_16BIT:
5459 IEM_MC_BEGIN(4, 0);
5460 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5461 IEM_MC_ARG(uint16_t, u16Src, 1);
5462 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5463 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5464
5465 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5466 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5467 IEM_MC_REF_EFLAGS(pEFlags);
5468 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5469
5470 IEM_MC_ADVANCE_RIP();
5471 IEM_MC_END();
5472 return VINF_SUCCESS;
5473
5474 case IEMMODE_32BIT:
5475 IEM_MC_BEGIN(4, 0);
5476 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5477 IEM_MC_ARG(uint32_t, u32Src, 1);
5478 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5479 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5480
5481 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5482 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5483 IEM_MC_REF_EFLAGS(pEFlags);
5484 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5485
5486 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5487 IEM_MC_ADVANCE_RIP();
5488 IEM_MC_END();
5489 return VINF_SUCCESS;
5490
5491 case IEMMODE_64BIT:
5492 IEM_MC_BEGIN(4, 0);
5493 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5494 IEM_MC_ARG(uint64_t, u64Src, 1);
5495 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5496 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5497
5498 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5499 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5500 IEM_MC_REF_EFLAGS(pEFlags);
5501 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5502
5503 IEM_MC_ADVANCE_RIP();
5504 IEM_MC_END();
5505 return VINF_SUCCESS;
5506
5507 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5508 }
5509 }
5510 else
5511 {
5512 switch (pVCpu->iem.s.enmEffOpSize)
5513 {
5514 case IEMMODE_16BIT:
5515 IEM_MC_BEGIN(4, 2);
5516 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5517 IEM_MC_ARG(uint16_t, u16Src, 1);
5518 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5519 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5520 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5521
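 /* Note: the effective address is calculated with one trailing immediate
    byte declared (third parameter), so that RIP-relative addressing
    accounts for the Ib fetched right below. */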
5522 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5523 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5524 IEM_MC_ASSIGN(cShiftArg, cShift);
5525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5526 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5527 IEM_MC_FETCH_EFLAGS(EFlags);
5528 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5529 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5530
5531 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5532 IEM_MC_COMMIT_EFLAGS(EFlags);
5533 IEM_MC_ADVANCE_RIP();
5534 IEM_MC_END();
5535 return VINF_SUCCESS;
5536
5537 case IEMMODE_32BIT:
5538 IEM_MC_BEGIN(4, 2);
5539 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5540 IEM_MC_ARG(uint32_t, u32Src, 1);
5541 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5542 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5544
5545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5546 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5547 IEM_MC_ASSIGN(cShiftArg, cShift);
5548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5549 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5550 IEM_MC_FETCH_EFLAGS(EFlags);
5551 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5552 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5553
5554 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5555 IEM_MC_COMMIT_EFLAGS(EFlags);
5556 IEM_MC_ADVANCE_RIP();
5557 IEM_MC_END();
5558 return VINF_SUCCESS;
5559
5560 case IEMMODE_64BIT:
5561 IEM_MC_BEGIN(4, 2);
5562 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5563 IEM_MC_ARG(uint64_t, u64Src, 1);
5564 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5565 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5567
5568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5569 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5570 IEM_MC_ASSIGN(cShiftArg, cShift);
5571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5572 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5573 IEM_MC_FETCH_EFLAGS(EFlags);
5574 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5575 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5576
5577 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5578 IEM_MC_COMMIT_EFLAGS(EFlags);
5579 IEM_MC_ADVANCE_RIP();
5580 IEM_MC_END();
5581 return VINF_SUCCESS;
5582
5583 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5584 }
5585 }
5586}
5587
5588
5589/**
5590 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5591 */
5592FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5593{
5594 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5595 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5596
5597 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5598 {
5599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5600
5601 switch (pVCpu->iem.s.enmEffOpSize)
5602 {
5603 case IEMMODE_16BIT:
5604 IEM_MC_BEGIN(4, 0);
5605 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5606 IEM_MC_ARG(uint16_t, u16Src, 1);
5607 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5608 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5609
5610 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5611 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5612 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5613 IEM_MC_REF_EFLAGS(pEFlags);
5614 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5615
5616 IEM_MC_ADVANCE_RIP();
5617 IEM_MC_END();
5618 return VINF_SUCCESS;
5619
5620 case IEMMODE_32BIT:
5621 IEM_MC_BEGIN(4, 0);
5622 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5623 IEM_MC_ARG(uint32_t, u32Src, 1);
5624 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5625 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5626
5627 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5628 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5629 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5630 IEM_MC_REF_EFLAGS(pEFlags);
5631 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5632
5633 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5634 IEM_MC_ADVANCE_RIP();
5635 IEM_MC_END();
5636 return VINF_SUCCESS;
5637
5638 case IEMMODE_64BIT:
5639 IEM_MC_BEGIN(4, 0);
5640 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5641 IEM_MC_ARG(uint64_t, u64Src, 1);
5642 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5643 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5644
5645 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5646 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5647 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5648 IEM_MC_REF_EFLAGS(pEFlags);
5649 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5650
5651 IEM_MC_ADVANCE_RIP();
5652 IEM_MC_END();
5653 return VINF_SUCCESS;
5654
5655 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5656 }
5657 }
5658 else
5659 {
5660 switch (pVCpu->iem.s.enmEffOpSize)
5661 {
5662 case IEMMODE_16BIT:
5663 IEM_MC_BEGIN(4, 2);
5664 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5665 IEM_MC_ARG(uint16_t, u16Src, 1);
5666 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5667 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5668 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5669
5670 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5672 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5673 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5674 IEM_MC_FETCH_EFLAGS(EFlags);
5675 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5676 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5677
5678 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5679 IEM_MC_COMMIT_EFLAGS(EFlags);
5680 IEM_MC_ADVANCE_RIP();
5681 IEM_MC_END();
5682 return VINF_SUCCESS;
5683
5684 case IEMMODE_32BIT:
5685 IEM_MC_BEGIN(4, 2);
5686 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5687 IEM_MC_ARG(uint32_t, u32Src, 1);
5688 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5689 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5691
5692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5694 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5695 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5696 IEM_MC_FETCH_EFLAGS(EFlags);
5697 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5698 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5699
5700 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5701 IEM_MC_COMMIT_EFLAGS(EFlags);
5702 IEM_MC_ADVANCE_RIP();
5703 IEM_MC_END();
5704 return VINF_SUCCESS;
5705
5706 case IEMMODE_64BIT:
5707 IEM_MC_BEGIN(4, 2);
5708 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5709 IEM_MC_ARG(uint64_t, u64Src, 1);
5710 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5711 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5713
5714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5716 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5717 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5718 IEM_MC_FETCH_EFLAGS(EFlags);
5719 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5720 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5721
5722 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5723 IEM_MC_COMMIT_EFLAGS(EFlags);
5724 IEM_MC_ADVANCE_RIP();
5725 IEM_MC_END();
5726 return VINF_SUCCESS;
5727
5728 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5729 }
5730 }
5731}
5732
5733
5735/** Opcode 0x0f 0xa4. */
5736FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5737{
5738 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5739 IEMOP_HLP_MIN_386();
5740 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5741}
5742
5743
5744/** Opcode 0x0f 0xa5. */
5745FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5746{
5747 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5748 IEMOP_HLP_MIN_386();
5749 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5750}
5751
5752
5753/** Opcode 0x0f 0xa8. */
5754FNIEMOP_DEF(iemOp_push_gs)
5755{
5756 IEMOP_MNEMONIC(push_gs, "push gs");
5757 IEMOP_HLP_MIN_386();
5758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5759 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5760}
5761
5762
5763/** Opcode 0x0f 0xa9. */
5764FNIEMOP_DEF(iemOp_pop_gs)
5765{
5766 IEMOP_MNEMONIC(pop_gs, "pop gs");
5767 IEMOP_HLP_MIN_386();
5768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5769 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5770}
5771
5772
5773/** Opcode 0x0f 0xaa. */
5774FNIEMOP_STUB(iemOp_rsm);
5775//IEMOP_HLP_MIN_386();
5776
5777
5778/** Opcode 0x0f 0xab. */
5779FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5780{
5781 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5782 IEMOP_HLP_MIN_386();
5783 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5784}
5785
5786
5787/** Opcode 0x0f 0xac. */
5788FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5789{
5790 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5791 IEMOP_HLP_MIN_386();
5792 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5793}
5794
5795
5796/** Opcode 0x0f 0xad. */
5797FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5798{
5799 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5800 IEMOP_HLP_MIN_386();
5801 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5802}
5803
5804
5805/** Opcode 0x0f 0xae mem/0. */
5806FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5807{
5808 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5809 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5810 return IEMOP_RAISE_INVALID_OPCODE();
5811
5812 IEM_MC_BEGIN(3, 1);
5813 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5814 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5815 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5818 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5819 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5820 IEM_MC_END();
5821 return VINF_SUCCESS;
5822}
5823
5824
5825/** Opcode 0x0f 0xae mem/1. */
5826FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5827{
5828 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5829 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5830 return IEMOP_RAISE_INVALID_OPCODE();
5831
5832 IEM_MC_BEGIN(3, 1);
5833 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5834 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5835 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5838 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5839 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5840 IEM_MC_END();
5841 return VINF_SUCCESS;
5842}
5843
5844
5845/** Opcode 0x0f 0xae mem/2. */
5846FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5847
5848/** Opcode 0x0f 0xae mem/3. */
5849FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5850
5851/** Opcode 0x0f 0xae mem/4. */
5852FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5853
5854/** Opcode 0x0f 0xae mem/5. */
5855FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5856
5857/** Opcode 0x0f 0xae mem/6. */
5858FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5859
5860/** Opcode 0x0f 0xae mem/7. */
5861FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5862
5863
5864/** Opcode 0x0f 0xae 11b/5. */
5865FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5866{
5867 RT_NOREF_PV(bRm);
5868 IEMOP_MNEMONIC(lfence, "lfence");
5869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5870 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5871 return IEMOP_RAISE_INVALID_OPCODE();
5872
5873 IEM_MC_BEGIN(0, 0);
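 /* Use the real fence instruction when the host has SSE2 too; otherwise
    fall back to the generic alternative memory fence (the MFENCE and
    SFENCE workers below follow the same pattern). */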
5874 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5875 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5876 else
5877 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5878 IEM_MC_ADVANCE_RIP();
5879 IEM_MC_END();
5880 return VINF_SUCCESS;
5881}
5882
5883
5884/** Opcode 0x0f 0xae 11b/6. */
5885FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5886{
5887 RT_NOREF_PV(bRm);
5888 IEMOP_MNEMONIC(mfence, "mfence");
5889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5890 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5891 return IEMOP_RAISE_INVALID_OPCODE();
5892
5893 IEM_MC_BEGIN(0, 0);
5894 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5895 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5896 else
5897 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5898 IEM_MC_ADVANCE_RIP();
5899 IEM_MC_END();
5900 return VINF_SUCCESS;
5901}
5902
5903
5904/** Opcode 0x0f 0xae 11b/7. */
5905FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5906{
5907 RT_NOREF_PV(bRm);
5908 IEMOP_MNEMONIC(sfence, "sfence");
5909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5910 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5911 return IEMOP_RAISE_INVALID_OPCODE();
5912
5913 IEM_MC_BEGIN(0, 0);
5914 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5915 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5916 else
5917 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5918 IEM_MC_ADVANCE_RIP();
5919 IEM_MC_END();
5920 return VINF_SUCCESS;
5921}
5922
5923
5924/** Opcode 0xf3 0x0f 0xae 11b/0. */
5925FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5926
5927/** Opcode 0xf3 0x0f 0xae 11b/1. */
5928FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5929
5930/** Opcode 0xf3 0x0f 0xae 11b/2. */
5931FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5932
5933/** Opcode 0xf3 0x0f 0xae 11b/3. */
5934FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5935
5936
5937/** Opcode 0x0f 0xae. */
5938FNIEMOP_DEF(iemOp_Grp15)
5939{
5940 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
5941 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
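 /* The memory forms are selected by the reg field alone; the register
    (mod=3) forms below additionally depend on the repeat/operand-size
    prefixes. */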
5942 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5943 {
5944 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5945 {
5946 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5947 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5948 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5949 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5950 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5951 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5952 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5953 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5954 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5955 }
5956 }
5957 else
5958 {
5959 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5960 {
5961 case 0:
5962 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5963 {
5964 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5965 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5966 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5967 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5968 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5969 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5970 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5971 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5972 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5973 }
5974 break;
5975
5976 case IEM_OP_PRF_REPZ:
5977 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5978 {
5979 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5980 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5981 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5982 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5983 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5984 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5985 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5986 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5987 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5988 }
5989 break;
5990
5991 default:
5992 return IEMOP_RAISE_INVALID_OPCODE();
5993 }
5994 }
5995}
5996
5997
5998/** Opcode 0x0f 0xaf. */
5999FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6000{
6001 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6002 IEMOP_HLP_MIN_386();
6003 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6004 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6005}
6006
6007
6008/** Opcode 0x0f 0xb0. */
6009FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6010{
6011 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6012 IEMOP_HLP_MIN_486();
6013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6014
6015 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6016 {
6017 IEMOP_HLP_DONE_DECODING();
6018 IEM_MC_BEGIN(4, 0);
6019 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6020 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6021 IEM_MC_ARG(uint8_t, u8Src, 2);
6022 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6023
6024 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6025 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6026 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6027 IEM_MC_REF_EFLAGS(pEFlags);
6028 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6029 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6030 else
6031 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6032
6033 IEM_MC_ADVANCE_RIP();
6034 IEM_MC_END();
6035 }
6036 else
6037 {
6038 IEM_MC_BEGIN(4, 3);
6039 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6040 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6041 IEM_MC_ARG(uint8_t, u8Src, 2);
6042 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6044 IEM_MC_LOCAL(uint8_t, u8Al);
6045
6046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6047 IEMOP_HLP_DONE_DECODING();
6048 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6049 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6050 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6051 IEM_MC_FETCH_EFLAGS(EFlags);
6052 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6053 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6054 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6055 else
6056 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6057
6058 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6059 IEM_MC_COMMIT_EFLAGS(EFlags);
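 /* Write the accumulator copy back: on a compare mismatch the helper has
    loaded the destination value into it. */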
6060 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6061 IEM_MC_ADVANCE_RIP();
6062 IEM_MC_END();
6063 }
6064 return VINF_SUCCESS;
6065}
6066

6067/** Opcode 0x0f 0xb1. */
6068FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6069{
6070 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6071 IEMOP_HLP_MIN_486();
6072 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6073
6074 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6075 {
6076 IEMOP_HLP_DONE_DECODING();
6077 switch (pVCpu->iem.s.enmEffOpSize)
6078 {
6079 case IEMMODE_16BIT:
6080 IEM_MC_BEGIN(4, 0);
6081 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6082 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6083 IEM_MC_ARG(uint16_t, u16Src, 2);
6084 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6085
6086 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6087 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6088 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6089 IEM_MC_REF_EFLAGS(pEFlags);
6090 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6091 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6092 else
6093 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6094
6095 IEM_MC_ADVANCE_RIP();
6096 IEM_MC_END();
6097 return VINF_SUCCESS;
6098
6099 case IEMMODE_32BIT:
6100 IEM_MC_BEGIN(4, 0);
6101 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6102 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6103 IEM_MC_ARG(uint32_t, u32Src, 2);
6104 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6105
6106 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6107 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6108 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6109 IEM_MC_REF_EFLAGS(pEFlags);
6110 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6111 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6112 else
6113 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6114
6115 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6116 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6117 IEM_MC_ADVANCE_RIP();
6118 IEM_MC_END();
6119 return VINF_SUCCESS;
6120
6121 case IEMMODE_64BIT:
6122 IEM_MC_BEGIN(4, 0);
6123 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6124 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
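 /* On 32-bit (x86) hosts the 64-bit source operand is handed to the
    assembly helper by reference rather than by value, presumably owing
    to calling convention limits. */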
6125#ifdef RT_ARCH_X86
6126 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6127#else
6128 IEM_MC_ARG(uint64_t, u64Src, 2);
6129#endif
6130 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6131
6132 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6133 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6134 IEM_MC_REF_EFLAGS(pEFlags);
6135#ifdef RT_ARCH_X86
6136 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6137 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6138 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6139 else
6140 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6141#else
6142 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6143 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6144 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6145 else
6146 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6147#endif
6148
6149 IEM_MC_ADVANCE_RIP();
6150 IEM_MC_END();
6151 return VINF_SUCCESS;
6152
6153 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6154 }
6155 }
6156 else
6157 {
6158 switch (pVCpu->iem.s.enmEffOpSize)
6159 {
6160 case IEMMODE_16BIT:
6161 IEM_MC_BEGIN(4, 3);
6162 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6163 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6164 IEM_MC_ARG(uint16_t, u16Src, 2);
6165 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6166 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6167 IEM_MC_LOCAL(uint16_t, u16Ax);
6168
6169 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6170 IEMOP_HLP_DONE_DECODING();
6171 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6172 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6173 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6174 IEM_MC_FETCH_EFLAGS(EFlags);
6175 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6176 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6177 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6178 else
6179 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6180
6181 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6182 IEM_MC_COMMIT_EFLAGS(EFlags);
6183 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6184 IEM_MC_ADVANCE_RIP();
6185 IEM_MC_END();
6186 return VINF_SUCCESS;
6187
6188 case IEMMODE_32BIT:
6189 IEM_MC_BEGIN(4, 3);
6190 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6191 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6192 IEM_MC_ARG(uint32_t, u32Src, 2);
6193 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6194 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6195 IEM_MC_LOCAL(uint32_t, u32Eax);
6196
6197 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6198 IEMOP_HLP_DONE_DECODING();
6199 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6200 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6201 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6202 IEM_MC_FETCH_EFLAGS(EFlags);
6203 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6204 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6205 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6206 else
6207 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6208
6209 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6210 IEM_MC_COMMIT_EFLAGS(EFlags);
6211 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6212 IEM_MC_ADVANCE_RIP();
6213 IEM_MC_END();
6214 return VINF_SUCCESS;
6215
6216 case IEMMODE_64BIT:
6217 IEM_MC_BEGIN(4, 3);
6218 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6219 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6220#ifdef RT_ARCH_X86
6221 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6222#else
6223 IEM_MC_ARG(uint64_t, u64Src, 2);
6224#endif
6225 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6227 IEM_MC_LOCAL(uint64_t, u64Rax);
6228
6229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6230 IEMOP_HLP_DONE_DECODING();
6231 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6232 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6233 IEM_MC_FETCH_EFLAGS(EFlags);
6234 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6235#ifdef RT_ARCH_X86
6236 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6237 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6238 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6239 else
6240 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6241#else
6242 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6243 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6244 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6245 else
6246 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6247#endif
6248
6249 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6250 IEM_MC_COMMIT_EFLAGS(EFlags);
6251 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6252 IEM_MC_ADVANCE_RIP();
6253 IEM_MC_END();
6254 return VINF_SUCCESS;
6255
6256 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6257 }
6258 }
6259}
6260
6261
6262FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6263{
6264 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6265 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
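 /* Mp operand: the operand-sized offset comes first in memory, followed
    by the 16-bit selector; both are fetched before deferring to the C
    worker that loads the segment and general registers. */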
6266
6267 switch (pVCpu->iem.s.enmEffOpSize)
6268 {
6269 case IEMMODE_16BIT:
6270 IEM_MC_BEGIN(5, 1);
6271 IEM_MC_ARG(uint16_t, uSel, 0);
6272 IEM_MC_ARG(uint16_t, offSeg, 1);
6273 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6274 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6275 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6276 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6277 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6279 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6280 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6281 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6282 IEM_MC_END();
6283 return VINF_SUCCESS;
6284
6285 case IEMMODE_32BIT:
6286 IEM_MC_BEGIN(5, 1);
6287 IEM_MC_ARG(uint16_t, uSel, 0);
6288 IEM_MC_ARG(uint32_t, offSeg, 1);
6289 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6290 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6291 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6292 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6295 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6296 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6297 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6298 IEM_MC_END();
6299 return VINF_SUCCESS;
6300
6301 case IEMMODE_64BIT:
6302 IEM_MC_BEGIN(5, 1);
6303 IEM_MC_ARG(uint16_t, uSel, 0);
6304 IEM_MC_ARG(uint64_t, offSeg, 1);
6305 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6306 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6307 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6308 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6309 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6311 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
6312 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6313 else
6314 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6315 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6316 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6317 IEM_MC_END();
6318 return VINF_SUCCESS;
6319
6320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6321 }
6322}
6323
6324
6325/** Opcode 0x0f 0xb2. */
6326FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6327{
6328 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6329 IEMOP_HLP_MIN_386();
6330 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6331 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6332 return IEMOP_RAISE_INVALID_OPCODE();
6333 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6334}
6335
6336
6337/** Opcode 0x0f 0xb3. */
6338FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6339{
6340 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6341 IEMOP_HLP_MIN_386();
6342 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6343}
6344
6345
6346/** Opcode 0x0f 0xb4. */
6347FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6348{
6349 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6350 IEMOP_HLP_MIN_386();
6351 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6352 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6353 return IEMOP_RAISE_INVALID_OPCODE();
6354 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6355}
6356
6357
6358/** Opcode 0x0f 0xb5. */
6359FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6360{
6361 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6362 IEMOP_HLP_MIN_386();
6363 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6364 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6365 return IEMOP_RAISE_INVALID_OPCODE();
6366 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6367}
6368
6369
6370/** Opcode 0x0f 0xb6. */
6371FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6372{
6373 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6374 IEMOP_HLP_MIN_386();
6375
6376 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6377
6378 /*
6379 * If rm is denoting a register, no more instruction bytes.
6380 */
6381 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6382 {
6383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6384 switch (pVCpu->iem.s.enmEffOpSize)
6385 {
6386 case IEMMODE_16BIT:
6387 IEM_MC_BEGIN(0, 1);
6388 IEM_MC_LOCAL(uint16_t, u16Value);
6389 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6390 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6391 IEM_MC_ADVANCE_RIP();
6392 IEM_MC_END();
6393 return VINF_SUCCESS;
6394
6395 case IEMMODE_32BIT:
6396 IEM_MC_BEGIN(0, 1);
6397 IEM_MC_LOCAL(uint32_t, u32Value);
6398 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6399 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6400 IEM_MC_ADVANCE_RIP();
6401 IEM_MC_END();
6402 return VINF_SUCCESS;
6403
6404 case IEMMODE_64BIT:
6405 IEM_MC_BEGIN(0, 1);
6406 IEM_MC_LOCAL(uint64_t, u64Value);
6407 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6408 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6409 IEM_MC_ADVANCE_RIP();
6410 IEM_MC_END();
6411 return VINF_SUCCESS;
6412
6413 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6414 }
6415 }
6416 else
6417 {
6418 /*
6419 * We're loading a register from memory.
6420 */
6421 switch (pVCpu->iem.s.enmEffOpSize)
6422 {
6423 case IEMMODE_16BIT:
6424 IEM_MC_BEGIN(0, 2);
6425 IEM_MC_LOCAL(uint16_t, u16Value);
6426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6429 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6430 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6431 IEM_MC_ADVANCE_RIP();
6432 IEM_MC_END();
6433 return VINF_SUCCESS;
6434
6435 case IEMMODE_32BIT:
6436 IEM_MC_BEGIN(0, 2);
6437 IEM_MC_LOCAL(uint32_t, u32Value);
6438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6441 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6442 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6443 IEM_MC_ADVANCE_RIP();
6444 IEM_MC_END();
6445 return VINF_SUCCESS;
6446
6447 case IEMMODE_64BIT:
6448 IEM_MC_BEGIN(0, 2);
6449 IEM_MC_LOCAL(uint64_t, u64Value);
6450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6453 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6454 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6455 IEM_MC_ADVANCE_RIP();
6456 IEM_MC_END();
6457 return VINF_SUCCESS;
6458
6459 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6460 }
6461 }
6462}
6463
6464
6465/** Opcode 0x0f 0xb7. */
6466FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6467{
6468 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6469 IEMOP_HLP_MIN_386();
6470
6471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6472
6473 /** @todo Not entirely sure how the operand size prefix is handled here,
6474 * assuming that it will be ignored. Would be nice to have a few
6475 * tests for this. */
6476 /*
6477 * If rm is denoting a register, no more instruction bytes.
6478 */
6479 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6480 {
6481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6482 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6483 {
6484 IEM_MC_BEGIN(0, 1);
6485 IEM_MC_LOCAL(uint32_t, u32Value);
6486 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6487 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6488 IEM_MC_ADVANCE_RIP();
6489 IEM_MC_END();
6490 }
6491 else
6492 {
6493 IEM_MC_BEGIN(0, 1);
6494 IEM_MC_LOCAL(uint64_t, u64Value);
6495 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6496 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6497 IEM_MC_ADVANCE_RIP();
6498 IEM_MC_END();
6499 }
6500 }
6501 else
6502 {
6503 /*
6504 * We're loading a register from memory.
6505 */
6506 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6507 {
6508 IEM_MC_BEGIN(0, 2);
6509 IEM_MC_LOCAL(uint32_t, u32Value);
6510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6513 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6514 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6515 IEM_MC_ADVANCE_RIP();
6516 IEM_MC_END();
6517 }
6518 else
6519 {
6520 IEM_MC_BEGIN(0, 2);
6521 IEM_MC_LOCAL(uint64_t, u64Value);
6522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6523 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6525 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6526 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6527 IEM_MC_ADVANCE_RIP();
6528 IEM_MC_END();
6529 }
6530 }
6531 return VINF_SUCCESS;
6532}
6533
6534
6535/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6536FNIEMOP_UD_STUB(iemOp_jmpe);
6537/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6538FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6539
6540
6541/** Opcode 0x0f 0xb9. */
6542FNIEMOP_DEF(iemOp_Grp10)
6543{
6544 Log(("iemOp_Grp10 -> #UD\n"));
6545 return IEMOP_RAISE_INVALID_OPCODE();
6546}
6547
6548
6549/** Opcode 0x0f 0xba. */
6550FNIEMOP_DEF(iemOp_Grp8)
6551{
6552 IEMOP_HLP_MIN_386();
6553 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6554 PCIEMOPBINSIZES pImpl;
6555 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6556 {
6557 case 0: case 1: case 2: case 3:
6558 return IEMOP_RAISE_INVALID_OPCODE();
6559 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6560 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6561 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6562 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6563 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6564 }
6565 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6566
6567 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6568 {
6569 /* register destination. */
6570 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6572
6573 switch (pVCpu->iem.s.enmEffOpSize)
6574 {
6575 case IEMMODE_16BIT:
6576 IEM_MC_BEGIN(3, 0);
6577 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6578 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6579 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6580
6581 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6582 IEM_MC_REF_EFLAGS(pEFlags);
6583 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6584
6585 IEM_MC_ADVANCE_RIP();
6586 IEM_MC_END();
6587 return VINF_SUCCESS;
6588
6589 case IEMMODE_32BIT:
6590 IEM_MC_BEGIN(3, 0);
6591 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6592 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6593 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6594
6595 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6596 IEM_MC_REF_EFLAGS(pEFlags);
6597 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6598
6599 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6600 IEM_MC_ADVANCE_RIP();
6601 IEM_MC_END();
6602 return VINF_SUCCESS;
6603
6604 case IEMMODE_64BIT:
6605 IEM_MC_BEGIN(3, 0);
6606 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6607 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6608 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6609
6610 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6611 IEM_MC_REF_EFLAGS(pEFlags);
6612 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6613
6614 IEM_MC_ADVANCE_RIP();
6615 IEM_MC_END();
6616 return VINF_SUCCESS;
6617
6618 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6619 }
6620 }
6621 else
6622 {
6623 /* memory destination. */
6624
6625 uint32_t fAccess;
6626 if (pImpl->pfnLockedU16)
6627 fAccess = IEM_ACCESS_DATA_RW;
6628 else /* BT */
6629 fAccess = IEM_ACCESS_DATA_R;
6630
6631 /** @todo test negative bit offsets! */
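 /* Unlike the Gv forms, the Ib bit offset is masked to the operand width
    up front, so no effective address adjustment takes place here. */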
6632 switch (pVCpu->iem.s.enmEffOpSize)
6633 {
6634 case IEMMODE_16BIT:
6635 IEM_MC_BEGIN(3, 1);
6636 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6637 IEM_MC_ARG(uint16_t, u16Src, 1);
6638 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6640
6641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6642 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6643 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6644 if (pImpl->pfnLockedU16)
6645 IEMOP_HLP_DONE_DECODING();
6646 else
6647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6648 IEM_MC_FETCH_EFLAGS(EFlags);
6649 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6650 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6651 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6652 else
6653 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6654 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6655
6656 IEM_MC_COMMIT_EFLAGS(EFlags);
6657 IEM_MC_ADVANCE_RIP();
6658 IEM_MC_END();
6659 return VINF_SUCCESS;
6660
6661 case IEMMODE_32BIT:
6662 IEM_MC_BEGIN(3, 1);
6663 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6664 IEM_MC_ARG(uint32_t, u32Src, 1);
6665 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6666 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6667
6668 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6669 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6670 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6671 if (pImpl->pfnLockedU16)
6672 IEMOP_HLP_DONE_DECODING();
6673 else
6674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6675 IEM_MC_FETCH_EFLAGS(EFlags);
6676 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6677 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6678 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6679 else
6680 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6681 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6682
6683 IEM_MC_COMMIT_EFLAGS(EFlags);
6684 IEM_MC_ADVANCE_RIP();
6685 IEM_MC_END();
6686 return VINF_SUCCESS;
6687
6688 case IEMMODE_64BIT:
6689 IEM_MC_BEGIN(3, 1);
6690 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6691 IEM_MC_ARG(uint64_t, u64Src, 1);
6692 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6694
6695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6696 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6697 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6698 if (pImpl->pfnLockedU16)
6699 IEMOP_HLP_DONE_DECODING();
6700 else
6701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6702 IEM_MC_FETCH_EFLAGS(EFlags);
6703 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6704 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6705 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6706 else
6707 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6708 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6709
6710 IEM_MC_COMMIT_EFLAGS(EFlags);
6711 IEM_MC_ADVANCE_RIP();
6712 IEM_MC_END();
6713 return VINF_SUCCESS;
6714
6715 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6716 }
6717 }
6719}
6720
6721
6722/** Opcode 0x0f 0xbb. */
6723FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6724{
6725 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6726 IEMOP_HLP_MIN_386();
6727 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6728}
6729
6730
6731/** Opcode 0x0f 0xbc. */
6732FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6733{
6734 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6735 IEMOP_HLP_MIN_386();
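 /* Only ZF is architecturally defined for BSF (set when the source is
    zero); the remaining status flags are undefined. */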
6736 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6737 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6738}
6739
6740
6741/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6742FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6743
6744
6745/** Opcode 0x0f 0xbd. */
6746FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6747{
6748 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6749 IEMOP_HLP_MIN_386();
6750 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6751 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6752}
6753
6754
6755/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6756FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6757
6758
6759/** Opcode 0x0f 0xbe. */
6760FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6761{
6762 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6763 IEMOP_HLP_MIN_386();
6764
6765 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6766
6767 /*
6768 * If rm is denoting a register, no more instruction bytes.
6769 */
6770 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6771 {
6772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6773 switch (pVCpu->iem.s.enmEffOpSize)
6774 {
6775 case IEMMODE_16BIT:
6776 IEM_MC_BEGIN(0, 1);
6777 IEM_MC_LOCAL(uint16_t, u16Value);
6778 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6779 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6780 IEM_MC_ADVANCE_RIP();
6781 IEM_MC_END();
6782 return VINF_SUCCESS;
6783
6784 case IEMMODE_32BIT:
6785 IEM_MC_BEGIN(0, 1);
6786 IEM_MC_LOCAL(uint32_t, u32Value);
6787 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6788 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6789 IEM_MC_ADVANCE_RIP();
6790 IEM_MC_END();
6791 return VINF_SUCCESS;
6792
6793 case IEMMODE_64BIT:
6794 IEM_MC_BEGIN(0, 1);
6795 IEM_MC_LOCAL(uint64_t, u64Value);
6796 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6797 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6798 IEM_MC_ADVANCE_RIP();
6799 IEM_MC_END();
6800 return VINF_SUCCESS;
6801
6802 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6803 }
6804 }
6805 else
6806 {
6807 /*
6808 * We're loading a register from memory.
6809 */
6810 switch (pVCpu->iem.s.enmEffOpSize)
6811 {
6812 case IEMMODE_16BIT:
6813 IEM_MC_BEGIN(0, 2);
6814 IEM_MC_LOCAL(uint16_t, u16Value);
6815 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6818 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6819 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6820 IEM_MC_ADVANCE_RIP();
6821 IEM_MC_END();
6822 return VINF_SUCCESS;
6823
6824 case IEMMODE_32BIT:
6825 IEM_MC_BEGIN(0, 2);
6826 IEM_MC_LOCAL(uint32_t, u32Value);
6827 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6828 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6830 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6831 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6832 IEM_MC_ADVANCE_RIP();
6833 IEM_MC_END();
6834 return VINF_SUCCESS;
6835
6836 case IEMMODE_64BIT:
6837 IEM_MC_BEGIN(0, 2);
6838 IEM_MC_LOCAL(uint64_t, u64Value);
6839 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6840 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6842 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6843 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6844 IEM_MC_ADVANCE_RIP();
6845 IEM_MC_END();
6846 return VINF_SUCCESS;
6847
6848 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6849 }
6850 }
6851}
6852
6853
6854/** Opcode 0x0f 0xbf. */
6855FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6856{
6857 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6858 IEMOP_HLP_MIN_386();
6859
6860 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6861
6862 /** @todo Not entirely sure how the operand size prefix is handled here,
6863 * assuming that it will be ignored. Would be nice to have a few
6864 * tests for this. */
6865 /*
6866 * If rm is denoting a register, no more instruction bytes.
6867 */
6868 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6869 {
6870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6871 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6872 {
6873 IEM_MC_BEGIN(0, 1);
6874 IEM_MC_LOCAL(uint32_t, u32Value);
6875 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6876 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6877 IEM_MC_ADVANCE_RIP();
6878 IEM_MC_END();
6879 }
6880 else
6881 {
6882 IEM_MC_BEGIN(0, 1);
6883 IEM_MC_LOCAL(uint64_t, u64Value);
6884 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6885 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6886 IEM_MC_ADVANCE_RIP();
6887 IEM_MC_END();
6888 }
6889 }
6890 else
6891 {
6892 /*
6893 * We're loading a register from memory.
6894 */
6895 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6896 {
6897 IEM_MC_BEGIN(0, 2);
6898 IEM_MC_LOCAL(uint32_t, u32Value);
6899 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6902 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6903 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6904 IEM_MC_ADVANCE_RIP();
6905 IEM_MC_END();
6906 }
6907 else
6908 {
6909 IEM_MC_BEGIN(0, 2);
6910 IEM_MC_LOCAL(uint64_t, u64Value);
6911 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6914 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6915 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6916 IEM_MC_ADVANCE_RIP();
6917 IEM_MC_END();
6918 }
6919 }
6920 return VINF_SUCCESS;
6921}
6922
6923
6924/** Opcode 0x0f 0xc0. */
6925FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6926{
6927 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6928 IEMOP_HLP_MIN_486();
6929 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
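/* Operation: TEMP = DST + SRC; SRC = DST; DST = TEMP. The source register
   thus receives the original destination value; with a LOCK prefix the
   memory form is executed atomically. */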
6930
6931 /*
6932 * If rm is denoting a register, no more instruction bytes.
6933 */
6934 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6935 {
6936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6937
6938 IEM_MC_BEGIN(3, 0);
6939 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6940 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6941 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6942
6943 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6944 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6945 IEM_MC_REF_EFLAGS(pEFlags);
6946 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6947
6948 IEM_MC_ADVANCE_RIP();
6949 IEM_MC_END();
6950 }
6951 else
6952 {
6953 /*
6954 * We're accessing memory.
6955 */
6956 IEM_MC_BEGIN(3, 3);
6957 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6958 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6959 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6960 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6961 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6962
6963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6964 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6965 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6966 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6967 IEM_MC_FETCH_EFLAGS(EFlags);
6968 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6969 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6970 else
6971 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6972
6973 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6974 IEM_MC_COMMIT_EFLAGS(EFlags);
6975 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6976 IEM_MC_ADVANCE_RIP();
6977 IEM_MC_END();
6979 }
6980 return VINF_SUCCESS;
6981}
6982
6983
6984/** Opcode 0x0f 0xc1. */
6985FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6986{
6987 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6988 IEMOP_HLP_MIN_486();
6989 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6990
6991 /*
6992 * If rm is denoting a register, no more instruction bytes.
6993 */
6994 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6995 {
6996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6997
6998 switch (pVCpu->iem.s.enmEffOpSize)
6999 {
7000 case IEMMODE_16BIT:
7001 IEM_MC_BEGIN(3, 0);
7002 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7003 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7004 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7005
7006 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7007 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7008 IEM_MC_REF_EFLAGS(pEFlags);
7009 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7010
7011 IEM_MC_ADVANCE_RIP();
7012 IEM_MC_END();
7013 return VINF_SUCCESS;
7014
7015 case IEMMODE_32BIT:
7016 IEM_MC_BEGIN(3, 0);
7017 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7018 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7019 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7020
7021 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7022 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7023 IEM_MC_REF_EFLAGS(pEFlags);
7024 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7025
7026 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7027 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7028 IEM_MC_ADVANCE_RIP();
7029 IEM_MC_END();
7030 return VINF_SUCCESS;
7031
7032 case IEMMODE_64BIT:
7033 IEM_MC_BEGIN(3, 0);
7034 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7035 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7036 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7037
7038 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7039 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7040 IEM_MC_REF_EFLAGS(pEFlags);
7041 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7042
7043 IEM_MC_ADVANCE_RIP();
7044 IEM_MC_END();
7045 return VINF_SUCCESS;
7046
7047 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7048 }
7049 }
7050 else
7051 {
7052 /*
7053 * We're accessing memory.
7054 */
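/* Note! The source register must receive the original memory value, so it
   is fetched into a local copy up front and only stored back to the real
   register once the memory operand has been committed. */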
7055 switch (pVCpu->iem.s.enmEffOpSize)
7056 {
7057 case IEMMODE_16BIT:
7058 IEM_MC_BEGIN(3, 3);
7059 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7060 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7061 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7062 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7063 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7064
7065 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7066 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7067 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7068 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7069 IEM_MC_FETCH_EFLAGS(EFlags);
7070 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7071 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7072 else
7073 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7074
7075 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7076 IEM_MC_COMMIT_EFLAGS(EFlags);
7077 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7078 IEM_MC_ADVANCE_RIP();
7079 IEM_MC_END();
7080 return VINF_SUCCESS;
7081
7082 case IEMMODE_32BIT:
7083 IEM_MC_BEGIN(3, 3);
7084 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7085 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7086 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7087 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7088 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7089
7090 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7091 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7092 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7093 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7094 IEM_MC_FETCH_EFLAGS(EFlags);
7095 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7096 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7097 else
7098 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7099
7100 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7101 IEM_MC_COMMIT_EFLAGS(EFlags);
7102 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7103 IEM_MC_ADVANCE_RIP();
7104 IEM_MC_END();
7105 return VINF_SUCCESS;
7106
7107 case IEMMODE_64BIT:
7108 IEM_MC_BEGIN(3, 3);
7109 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7110 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7111 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7112 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7113 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7114
7115 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7116 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7117 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7118 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7119 IEM_MC_FETCH_EFLAGS(EFlags);
7120 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7121 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7122 else
7123 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7124
7125 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7126 IEM_MC_COMMIT_EFLAGS(EFlags);
7127 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7128 IEM_MC_ADVANCE_RIP();
7129 IEM_MC_END();
7130 return VINF_SUCCESS;
7131
7132 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7133 }
7134 }
7135}
7136
7137
7138/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
7139FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
7140/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
7141FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
7142/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
7143FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
7144/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
7145FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7146
7147
7148/** Opcode 0x0f 0xc3. */
7149FNIEMOP_DEF(iemOp_movnti_My_Gy)
7150{
7151 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
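/* MOVNTI is a non-temporal store of a 32-bit or 64-bit general register to
   memory, hinting that the data should bypass the caches. It is an SSE2
   instruction, so #UD is raised when the guest lacks SSE2. */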
7152
7153 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7154
7155 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7156 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7157 {
7158 switch (pVCpu->iem.s.enmEffOpSize)
7159 {
7160 case IEMMODE_32BIT:
7161 IEM_MC_BEGIN(0, 2);
7162 IEM_MC_LOCAL(uint32_t, u32Value);
7163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7164
7165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7167 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7168 return IEMOP_RAISE_INVALID_OPCODE();
7169
7170 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7171 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7172 IEM_MC_ADVANCE_RIP();
7173 IEM_MC_END();
7174 break;
7175
7176 case IEMMODE_64BIT:
7177 IEM_MC_BEGIN(0, 2);
7178 IEM_MC_LOCAL(uint64_t, u64Value);
7179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7180
7181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7183 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7184 return IEMOP_RAISE_INVALID_OPCODE();
7185
7186 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7187 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7188 IEM_MC_ADVANCE_RIP();
7189 IEM_MC_END();
7190 break;
7191
7192 case IEMMODE_16BIT:
7193 /** @todo check this form. */
7194 return IEMOP_RAISE_INVALID_OPCODE();
7195 }
7196 }
7197 else
7198 return IEMOP_RAISE_INVALID_OPCODE();
7199 return VINF_SUCCESS;
7200}
7201/* Opcode 0x66 0x0f 0xc3 - invalid */
7202/* Opcode 0xf3 0x0f 0xc3 - invalid */
7203/* Opcode 0xf2 0x0f 0xc3 - invalid */
7204
7205/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
7206FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7207/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
7208FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
7209/* Opcode 0xf3 0x0f 0xc4 - invalid */
7210/* Opcode 0xf2 0x0f 0xc4 - invalid */
7211
7212/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7213FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7214/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
7215FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
7216/* Opcode 0xf3 0x0f 0xc5 - invalid */
7217/* Opcode 0xf2 0x0f 0xc5 - invalid */
7218
7219/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
7220FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
7221/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
7222FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
7223/* Opcode 0xf3 0x0f 0xc6 - invalid */
7224/* Opcode 0xf2 0x0f 0xc6 - invalid */
7225
7226
7227/** Opcode 0x0f 0xc7 !11/1. */
7228FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7229{
7230 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
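/* Operation (atomic with LOCK):
     if (EDX:EAX == [m64]) { ZF = 1; [m64] = ECX:EBX; }
     else                  { ZF = 0; EDX:EAX = [m64]; } */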
7231
7232 IEM_MC_BEGIN(4, 3);
7233 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7234 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7235 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7236 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7237 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7238 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7240
7241 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7242 IEMOP_HLP_DONE_DECODING();
7243 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7244
7245 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7246 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7247 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7248
7249 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7250 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7251 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7252
7253 IEM_MC_FETCH_EFLAGS(EFlags);
7254 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7255 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7256 else
7257 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7258
7259 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7260 IEM_MC_COMMIT_EFLAGS(EFlags);
7261 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7262 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7263 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7264 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7265 IEM_MC_ENDIF();
7266 IEM_MC_ADVANCE_RIP();
7267
7268 IEM_MC_END();
7269 return VINF_SUCCESS;
7270}
7271
7272
7273/** Opcode REX.W 0x0f 0xc7 !11/1. */
7274FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7275{
7276 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
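/* The 16-byte variant: RDX:RAX is compared with the memory operand and
   RCX:RBX stored on a match. The operand must be 16-byte aligned (#GP
   otherwise) and CPUID must report CMPXCHG16B support. */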
7277 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7278 {
7279#if 0
7280 RT_NOREF(bRm);
7281 IEMOP_BITCH_ABOUT_STUB();
7282 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7283#else
7284 IEM_MC_BEGIN(4, 3);
7285 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7286 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7287 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7288 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7289 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7290 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7291 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7292
7293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7294 IEMOP_HLP_DONE_DECODING();
7295 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7296 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7297
7298 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7299 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7300 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7301
7302 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7303 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7304 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7305
7306 IEM_MC_FETCH_EFLAGS(EFlags);
7307# ifdef RT_ARCH_AMD64
7308 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7309 {
7310 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7311 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7312 else
7313 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7314 }
7315 else
7316# endif
7317 {
7318 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
7319 accesses that are not all atomic, which works fine in a uni-CPU guest
7320 configuration (ignoring DMA). If guest SMP is active we have no choice
7321 but to use a rendezvous callback here. Sigh. */
7322 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7323 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7324 else
7325 {
7326 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7327 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7328 }
7329 }
7330
7331 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7332 IEM_MC_COMMIT_EFLAGS(EFlags);
7333 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7334 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7335 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7336 IEM_MC_ENDIF();
7337 IEM_MC_ADVANCE_RIP();
7338
7339 IEM_MC_END();
7340 return VINF_SUCCESS;
7341#endif
7342 }
7343 Log(("cmpxchg16b -> #UD\n"));
7344 return IEMOP_RAISE_INVALID_OPCODE();
7345}
7346
7347
7348/** Opcode 0x0f 0xc7 11/6. */
7349FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7350
7351/** Opcode 0x0f 0xc7 !11/6. */
7352FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7353
7354/** Opcode 0x66 0x0f 0xc7 !11/6. */
7355FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7356
7357/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7358FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7359
7360/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7361FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7362
7363
7364/** Opcode 0x0f 0xc7. */
7365FNIEMOP_DEF(iemOp_Grp9)
7366{
7367 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
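/* Group 9 dispatches on the ModR/M reg field: /1 is cmpxchg8b, or
   cmpxchg16b when REX.W is set; /6 is rdrand for the register form and
   vmptrld/vmclear/vmxon by mandatory prefix for the memory form; /7 is
   vmptrst. Everything else raises #UD. */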
7368 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7369 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7370 {
7371 case 0: case 2: case 3: case 4: case 5:
7372 return IEMOP_RAISE_INVALID_OPCODE();
7373 case 1:
7374 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
7375 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
7376 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
7377 return IEMOP_RAISE_INVALID_OPCODE();
7378 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7379 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7380 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7381 case 6:
7382 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7383 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
7384 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7385 {
7386 case 0:
7387 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
7388 case IEM_OP_PRF_SIZE_OP:
7389 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
7390 case IEM_OP_PRF_REPZ:
7391 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
7392 default:
7393 return IEMOP_RAISE_INVALID_OPCODE();
7394 }
7395 case 7:
7396 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7397 {
7398 case 0:
7399 case IEM_OP_PRF_REPZ:
7400 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
7401 default:
7402 return IEMOP_RAISE_INVALID_OPCODE();
7403 }
7404 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7405 }
7406}
7407
7408
7409/**
7410 * Common 'bswap register' helper.
7411 */
7412FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7413{
7414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7415 switch (pVCpu->iem.s.enmEffOpSize)
7416 {
7417 case IEMMODE_16BIT:
7418 IEM_MC_BEGIN(1, 0);
7419 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7420 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7421 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7422 IEM_MC_ADVANCE_RIP();
7423 IEM_MC_END();
7424 return VINF_SUCCESS;
7425
7426 case IEMMODE_32BIT:
7427 IEM_MC_BEGIN(1, 0);
7428 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7429 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7430 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7431 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7432 IEM_MC_ADVANCE_RIP();
7433 IEM_MC_END();
7434 return VINF_SUCCESS;
7435
7436 case IEMMODE_64BIT:
7437 IEM_MC_BEGIN(1, 0);
7438 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7439 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7440 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7441 IEM_MC_ADVANCE_RIP();
7442 IEM_MC_END();
7443 return VINF_SUCCESS;
7444
7445 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7446 }
7447}
7448
7449
7450/** Opcode 0x0f 0xc8. */
7451FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7452{
7453 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7454 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7455 prefix, but REX.B appears to be the correct prefix. For a parallel
7456 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7457 IEMOP_HLP_MIN_486();
7458 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7459}
7460
7461
7462/** Opcode 0x0f 0xc9. */
7463FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7464{
7465 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7466 IEMOP_HLP_MIN_486();
7467 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7468}
7469
7470
7471/** Opcode 0x0f 0xca. */
7472FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7473{
7474 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7475 IEMOP_HLP_MIN_486();
7476 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7477}
7478
7479
7480/** Opcode 0x0f 0xcb. */
7481FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7482{
7483 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7484 IEMOP_HLP_MIN_486();
7485 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7486}
7487
7488
7489/** Opcode 0x0f 0xcc. */
7490FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7491{
7492 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7493 IEMOP_HLP_MIN_486();
7494 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7495}
7496
7497
7498/** Opcode 0x0f 0xcd. */
7499FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7500{
7501 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7502 IEMOP_HLP_MIN_486();
7503 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7504}
7505
7506
7507/** Opcode 0x0f 0xce. */
7508FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7509{
7510 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7511 IEMOP_HLP_MIN_486();
7512 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7513}
7514
7515
7516/** Opcode 0x0f 0xcf. */
7517FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7518{
7519 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7520 IEMOP_HLP_MIN_486();
7521 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7522}
7523
7524
7525/* Opcode 0x0f 0xd0 - invalid */
7526/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7527FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7528/* Opcode 0xf3 0x0f 0xd0 - invalid */
7529/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7530FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7531
7532/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7533FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7534/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7535FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7536/* Opcode 0xf3 0x0f 0xd1 - invalid */
7537/* Opcode 0xf2 0x0f 0xd1 - invalid */
7538
7539/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7540FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7541/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7542FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7543/* Opcode 0xf3 0x0f 0xd2 - invalid */
7544/* Opcode 0xf2 0x0f 0xd2 - invalid */
7545
7546/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7547FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7548/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7549FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7550/* Opcode 0xf3 0x0f 0xd3 - invalid */
7551/* Opcode 0xf2 0x0f 0xd3 - invalid */
7552
7553/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7554FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7555/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7556FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7557/* Opcode 0xf3 0x0f 0xd4 - invalid */
7558/* Opcode 0xf2 0x0f 0xd4 - invalid */
7559
7560/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7561FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7562/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7563FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7564/* Opcode 0xf3 0x0f 0xd5 - invalid */
7565/* Opcode 0xf2 0x0f 0xd5 - invalid */
7566
7567/* Opcode 0x0f 0xd6 - invalid */
7568/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7569FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7570/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7571FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7572/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7573FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7574#if 0
7575FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7576{
7577 /* Docs say register only. */
7578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7579
7580 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7581 {
7582 case IEM_OP_PRF_SIZE_OP: /* SSE */
7583 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7584 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7585 IEM_MC_BEGIN(2, 0);
7586 IEM_MC_ARG(uint64_t *, pDst, 0);
7587 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7588 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7589 IEM_MC_PREPARE_SSE_USAGE();
7590 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7591 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7592 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7593 IEM_MC_ADVANCE_RIP();
7594 IEM_MC_END();
7595 return VINF_SUCCESS;
7596
7597 case 0: /* MMX */
7598 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7599 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7600 IEM_MC_BEGIN(2, 0);
7601 IEM_MC_ARG(uint64_t *, pDst, 0);
7602 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7603 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7604 IEM_MC_PREPARE_FPU_USAGE();
7605 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7606 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7607 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7608 IEM_MC_ADVANCE_RIP();
7609 IEM_MC_END();
7610 return VINF_SUCCESS;
7611
7612 default:
7613 return IEMOP_RAISE_INVALID_OPCODE();
7614 }
7615}
7616#endif
7617
7618
7619/** Opcode 0x0f 0xd7. */
7620FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7621{
7622 /* Docs say register only. */
7623 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7624 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7625 return IEMOP_RAISE_INVALID_OPCODE();
7626
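/* PMOVMSKB gathers the most significant bit of each byte in the source
   MMX/XMM register into the low 8 or 16 bits of the destination general
   register, zeroing the remaining bits. */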
7627 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7628 /** @todo testcase: Check that the instruction implicitly clears the high
7629 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7630 * and opcode modifications are made to work with the whole width (not
7631 * just 128). */
7632 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7633 {
7634 case IEM_OP_PRF_SIZE_OP: /* SSE */
7635 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7636 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7637 IEM_MC_BEGIN(2, 0);
7638 IEM_MC_ARG(uint64_t *, pDst, 0);
7639 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7640 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7641 IEM_MC_PREPARE_SSE_USAGE();
7642 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7643 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7644 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7645 IEM_MC_ADVANCE_RIP();
7646 IEM_MC_END();
7647 return VINF_SUCCESS;
7648
7649 case 0: /* MMX */
7650 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7651 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7652 IEM_MC_BEGIN(2, 0);
7653 IEM_MC_ARG(uint64_t *, pDst, 0);
7654 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7655 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7656 IEM_MC_PREPARE_FPU_USAGE();
7657 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7658 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7659 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7660 IEM_MC_ADVANCE_RIP();
7661 IEM_MC_END();
7662 return VINF_SUCCESS;
7663
7664 default:
7665 return IEMOP_RAISE_INVALID_OPCODE();
7666 }
7667}
7668
7669
7670/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7671FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7672/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7673FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7674/* Opcode 0xf3 0x0f 0xd8 - invalid */
7675/* Opcode 0xf2 0x0f 0xd8 - invalid */
7676
7677/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7678FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7679/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7680FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7681/* Opcode 0xf3 0x0f 0xd9 - invalid */
7682/* Opcode 0xf2 0x0f 0xd9 - invalid */
7683
7684/** Opcode 0x0f 0xda - pminub Pq, Qq */
7685FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7686/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7687FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7688/* Opcode 0xf3 0x0f 0xda - invalid */
7689/* Opcode 0xf2 0x0f 0xda - invalid */
7690
7691/** Opcode 0x0f 0xdb - pand Pq, Qq */
7692FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7693/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7694FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7695/* Opcode 0xf3 0x0f 0xdb - invalid */
7696/* Opcode 0xf2 0x0f 0xdb - invalid */
7697
7698/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7699FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7700/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7701FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7702/* Opcode 0xf3 0x0f 0xdc - invalid */
7703/* Opcode 0xf2 0x0f 0xdc - invalid */
7704
7705/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7706FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7707/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7708FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7709/* Opcode 0xf3 0x0f 0xdd - invalid */
7710/* Opcode 0xf2 0x0f 0xdd - invalid */
7711
7712/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7713FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7714/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7715FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7716/* Opcode 0xf3 0x0f 0xde - invalid */
7717/* Opcode 0xf2 0x0f 0xde - invalid */
7718
7719/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7720FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7721/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7722FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7723/* Opcode 0xf3 0x0f 0xdf - invalid */
7724/* Opcode 0xf2 0x0f 0xdf - invalid */
7725
7726/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7727FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7728/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7729FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7730/* Opcode 0xf3 0x0f 0xe0 - invalid */
7731/* Opcode 0xf2 0x0f 0xe0 - invalid */
7732
7733/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7734FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7735/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7736FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7737/* Opcode 0xf3 0x0f 0xe1 - invalid */
7738/* Opcode 0xf2 0x0f 0xe1 - invalid */
7739
7740/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7741FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7742/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7743FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7744/* Opcode 0xf3 0x0f 0xe2 - invalid */
7745/* Opcode 0xf2 0x0f 0xe2 - invalid */
7746
7747/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7748FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7749/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7750FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7751/* Opcode 0xf3 0x0f 0xe3 - invalid */
7752/* Opcode 0xf2 0x0f 0xe3 - invalid */
7753
7754/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7755FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7756/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7757FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7758/* Opcode 0xf3 0x0f 0xe4 - invalid */
7759/* Opcode 0xf2 0x0f 0xe4 - invalid */
7760
7761/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7762FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7763/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7764FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7765/* Opcode 0xf3 0x0f 0xe5 - invalid */
7766/* Opcode 0xf2 0x0f 0xe5 - invalid */
7767
7768/* Opcode 0x0f 0xe6 - invalid */
7769/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7770FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7771/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7772FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7773/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7774FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7775
7776
7777/** Opcode 0x0f 0xe7. */
7778FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7779{
7780 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7781 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7782 {
7783 /*
7784 * Register, memory.
7785 */
7786/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
7787 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7788 {
7789
7790 case IEM_OP_PRF_SIZE_OP: /* SSE */
7791 IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7792 IEM_MC_BEGIN(0, 2);
7793 IEM_MC_LOCAL(uint128_t, uSrc);
7794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7795
7796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7798 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7799 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7800
7801 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7802 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7803
7804 IEM_MC_ADVANCE_RIP();
7805 IEM_MC_END();
7806 break;
7807
7808 case 0: /* MMX */
7809 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7810 IEM_MC_BEGIN(0, 2);
7811 IEM_MC_LOCAL(uint64_t, uSrc);
7812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7813
7814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7816 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7817 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7818
7819 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7820 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7821
7822 IEM_MC_ADVANCE_RIP();
7823 IEM_MC_END();
7824 break;
7825
7826 default:
7827 return IEMOP_RAISE_INVALID_OPCODE();
7828 }
7829 }
7830 /* The register, register encoding is invalid. */
7831 else
7832 return IEMOP_RAISE_INVALID_OPCODE();
7833 return VINF_SUCCESS;
7834}
7835
7836
7837/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7838FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7839/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7840FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7841/* Opcode 0xf3 0x0f 0xe8 - invalid */
7842/* Opcode 0xf2 0x0f 0xe8 - invalid */
7843
7844/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7845FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7846/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7847FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7848/* Opcode 0xf3 0x0f 0xe9 - invalid */
7849/* Opcode 0xf2 0x0f 0xe9 - invalid */
7850
7851/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7852FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7853/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7854FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7855/* Opcode 0xf3 0x0f 0xea - invalid */
7856/* Opcode 0xf2 0x0f 0xea - invalid */
7857
7858/** Opcode 0x0f 0xeb - por Pq, Qq */
7859FNIEMOP_STUB(iemOp_por_Pq_Qq);
7860/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7861FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7862/* Opcode 0xf3 0x0f 0xeb - invalid */
7863/* Opcode 0xf2 0x0f 0xeb - invalid */
7864
7865/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7866FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7867/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7868FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7869/* Opcode 0xf3 0x0f 0xec - invalid */
7870/* Opcode 0xf2 0x0f 0xec - invalid */
7871
7872/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7873FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7874/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7875FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7876/* Opcode 0xf3 0x0f 0xed - invalid */
7877/* Opcode 0xf2 0x0f 0xed - invalid */
7878
7879/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7880FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7881/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7882FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7883/* Opcode 0xf3 0x0f 0xee - invalid */
7884/* Opcode 0xf2 0x0f 0xee - invalid */
7885
7886
7887/** Opcode 0x0f 0xef. */
7888FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
7889{
7890 IEMOP_MNEMONIC(pxor, "pxor");
7891 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7892}
7893/* Opcode 0xf3 0x0f 0xef - invalid */
7894/* Opcode 0xf2 0x0f 0xef - invalid */
7895
7896/* Opcode 0x0f 0xf0 - invalid */
7897/* Opcode 0x66 0x0f 0xf0 - invalid */
7898/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7899FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7900
7901/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7902FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7903/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7904FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7905/* Opcode 0xf2 0x0f 0xf1 - invalid */
7906
7907/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7908FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7909/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7910FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7911/* Opcode 0xf2 0x0f 0xf2 - invalid */
7912
7913/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7914FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7915/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7916FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7917/* Opcode 0xf2 0x0f 0xf3 - invalid */
7918
7919/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7920FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7921/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7922FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7923/* Opcode 0xf2 0x0f 0xf4 - invalid */
7924
7925/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7926FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7927/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7928FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7929/* Opcode 0xf2 0x0f 0xf5 - invalid */
7930
7931/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7932FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7933/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7934FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7935/* Opcode 0xf2 0x0f 0xf6 - invalid */
7936
7937/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7938FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7939/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7940FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7941/* Opcode 0xf2 0x0f 0xf7 - invalid */
7942
7943/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7944FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7945/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7946FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7947/* Opcode 0xf2 0x0f 0xf8 - invalid */
7948
7949/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7950FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7951/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7952FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7953/* Opcode 0xf2 0x0f 0xf9 - invalid */
7954
7955/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7956FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7957/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7958FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7959/* Opcode 0xf2 0x0f 0xfa - invalid */
7960
7961/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7962FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7963/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7964FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7965/* Opcode 0xf2 0x0f 0xfb - invalid */
7966
7967/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7968FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7969/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7970FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7971/* Opcode 0xf2 0x0f 0xfc - invalid */
7972
7973/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7974FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7975/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7976FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7977/* Opcode 0xf2 0x0f 0xfd - invalid */
7978
7979/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7980FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7981/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7982FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7983/* Opcode 0xf2 0x0f 0xfe - invalid */
7984
7985
7986/** Opcode **** 0x0f 0xff - UD0 */
7987FNIEMOP_DEF(iemOp_ud0)
7988{
7989 IEMOP_MNEMONIC(ud0, "ud0");
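/* On Intel CPUs a ModR/M byte (and any effective address bytes) is decoded
   for UD0 before #UD is raised; other vendors raise #UD straight away,
   hence the vendor check before consuming the operand. */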
7990 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7991 {
7992 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7993#ifndef TST_IEM_CHECK_MC
7994 RTGCPTR GCPtrEff;
7995 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7996 if (rcStrict != VINF_SUCCESS)
7997 return rcStrict;
7998#endif
7999 IEMOP_HLP_DONE_DECODING();
8000 }
8001 return IEMOP_RAISE_INVALID_OPCODE();
8002}
8003
8004
8005
8006/** Repeats a_fn four times. For decoding tables. */
8007#define IEMOP_X4(a_fn) a_fn, a_fn, a_fn, a_fn
8008
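/**
 * The two byte (0x0f escape) opcode dispatch table.
 *
 * Each opcode has four consecutive entries selected by the mandatory prefix
 * in effect: none, 0x66, 0xf3 or 0xf2; hence the 256 * 4 = 1024 entry
 * assertion following the table.
 */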
8009IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8010{
8011 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
8012 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8013 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8014 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8015 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8016 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8017 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8018 /* 0x06 */ IEMOP_X4(iemOp_clts),
8019 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8020 /* 0x08 */ IEMOP_X4(iemOp_invd),
8021 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8022 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8023 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8024 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8025 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8026 /* 0x0e */ IEMOP_X4(iemOp_femms),
8027 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
8028
8029 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
8030 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
8031 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
8032 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8033 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8034 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8035 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
8036 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8037 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
8038 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
8039 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
8040 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
8041 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
8042 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
8043 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
8044 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
8045
8046 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
8047 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
8048 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
8049 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8050 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8051 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8052 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8053 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8054 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8055 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM,
8056 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
8057 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8058 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
8059 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
8060 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8061 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8062
8063 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
8064 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
8065 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
8066 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
8067 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
8068 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
8069 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
8070 /* 0x37 */ IEMOP_X4(iemOp_getsec),
8071 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
8072 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8073 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
8074 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8075 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8076 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8077 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8078 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8079
8080 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
8081 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
8082 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
8083 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
8084 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
8085 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
8086 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
8087 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
8088 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
8089 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
8090 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
8091 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
8092 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
8093 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
8094 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
8095 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
8096
8097 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8098 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8099 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8100 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8101 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8102 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8103 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8104 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8105 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8106 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8107 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8108 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8109 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8110 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8111 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8112 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8113
8114 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8115 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8116 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8117 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8118 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8119 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8120 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8121 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8122 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8123 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8124 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8125 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8126 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8127 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8128 /* 0x6e */ IEMOP_X4(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey),
8129 /* 0x6f */ IEMOP_X4(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq),
8130
8131 /* 0x70 */ IEMOP_X4(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib),
8132 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8133 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8134 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8135 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8136 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8137 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8138 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8139
8140 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8141 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8142 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8143 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8144 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8145 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8146 /* 0x7e */ IEMOP_X4(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq),
8147 /* 0x7f */ IEMOP_X4(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq),
8148
8149 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
8150 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
8151 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
8152 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
8153 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
8154 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
8155 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
8156 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
8157 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
8158 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
8159 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
8160 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
8161 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
8162 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
8163 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
8164 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
8165
8166 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
8167 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
8168 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
8169 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
8170 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
8171 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
8172 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
8173 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
8174 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
8175 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
8176 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
8177 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
8178 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
8179 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
8180 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
8181 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
8182
8183 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
8184 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
8185 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
8186 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
8187 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
8188 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
8189 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8190 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8191 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
8192 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
8193 /* 0xaa */ IEMOP_X4(iemOp_rsm),
8194 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
8195 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
8196 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
8197 /* 0xae */ IEMOP_X4(iemOp_Grp15),
8198 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
8199
8200 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
8201 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
8202 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
8203 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
8204 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
8205 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
8206 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
8207 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
8208 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
8209 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
8210 /* 0xba */ IEMOP_X4(iemOp_Grp8),
8211 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
8212 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
8213 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
8214 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
8215 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
8216
8217 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
8218 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
8219 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8220 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8221 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8222 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8223 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
8224 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
8225 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
8226 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
8227 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
8228 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
8229 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
8230 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
8231 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
8232 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
8233
8234 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8235 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8236 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8237 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8238 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8239 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8240 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
8241 /* 0xd7 */ IEMOP_X4(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq),
8242 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8243 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8244 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8245 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8246 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8247 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8248 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8249 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8250
8251 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8252 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8253 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8254 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8255 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8256 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8257 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8258 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8259 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8260 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8261 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8262 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8263 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8264 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8265 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8266 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8267
8268 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8269 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8270 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8271 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8272 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8273 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8274 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8275 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8276 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8277 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8278 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8279 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8280 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8281 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8282 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8283 /* 0xff */ IEMOP_X4(iemOp_ud0),
8284};
8285AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
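
/* A sketch of how the table is meant to be indexed (see iemOp_2byteEscape
   below): each opcode byte owns four consecutive entries, one per
   mandatory-prefix column in the order none, 0x66, 0xF3, 0xF2, i.e.
        pfn = g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix];
   which is why the AssertCompile above demands 256 * 4 = 1024 entries. */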
8286/** @} */
8287
8288
8289/** @name One byte opcodes.
8290 *
8291 * @{
8292 */
8293
8294/** Opcode 0x00. */
8295FNIEMOP_DEF(iemOp_add_Eb_Gb)
8296{
8297 IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
8298 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
8299}
8300
8301
8302/** Opcode 0x01. */
8303FNIEMOP_DEF(iemOp_add_Ev_Gv)
8304{
8305 IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
8306 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
8307}
8308
8309
8310/** Opcode 0x02. */
8311FNIEMOP_DEF(iemOp_add_Gb_Eb)
8312{
8313 IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
8314 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
8315}
8316
8317
8318/** Opcode 0x03. */
8319FNIEMOP_DEF(iemOp_add_Gv_Ev)
8320{
8321 IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
8322 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
8323}
8324
8325
8326/** Opcode 0x04. */
8327FNIEMOP_DEF(iemOp_add_Al_Ib)
8328{
8329 IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
8330 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
8331}
8332
8333
8334/** Opcode 0x05. */
8335FNIEMOP_DEF(iemOp_add_eAX_Iz)
8336{
8337 IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
8338 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
8339}
8340
8341
8342/** Opcode 0x06. */
8343FNIEMOP_DEF(iemOp_push_ES)
8344{
8345 IEMOP_MNEMONIC(push_es, "push es");
8346 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
8347}
8348
8349
8350/** Opcode 0x07. */
8351FNIEMOP_DEF(iemOp_pop_ES)
8352{
8353 IEMOP_MNEMONIC(pop_es, "pop es");
8354 IEMOP_HLP_NO_64BIT();
8355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8356 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
8357}
8358
8359
8360/** Opcode 0x08. */
8361FNIEMOP_DEF(iemOp_or_Eb_Gb)
8362{
8363 IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
8364 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8365 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
8366}
8367
8368
8369/** Opcode 0x09. */
8370FNIEMOP_DEF(iemOp_or_Ev_Gv)
8371{
8372 IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
8373 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8374 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
8375}
8376
8377
8378/** Opcode 0x0a. */
8379FNIEMOP_DEF(iemOp_or_Gb_Eb)
8380{
8381 IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
8382 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8383 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
8384}
8385
8386
8387/** Opcode 0x0b. */
8388FNIEMOP_DEF(iemOp_or_Gv_Ev)
8389{
8390 IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
8391 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8392 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
8393}
8394
8395
8396/** Opcode 0x0c. */
8397FNIEMOP_DEF(iemOp_or_Al_Ib)
8398{
8399 IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
8400 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8401 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
8402}
8403
8404
8405/** Opcode 0x0d. */
8406FNIEMOP_DEF(iemOp_or_eAX_Iz)
8407{
8408 IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
8409 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8410 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
8411}
8412
8413
8414/** Opcode 0x0e. */
8415FNIEMOP_DEF(iemOp_push_CS)
8416{
8417 IEMOP_MNEMONIC(push_cs, "push cs");
8418 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
8419}
8420
8421
8422/** Opcode 0x0f. */
8423FNIEMOP_DEF(iemOp_2byteEscape)
8424{
8425#ifdef VBOX_STRICT
8426 static bool s_fTested = false;
8427 if (RT_LIKELY(s_fTested)) { /* likely */ }
8428 else
8429 {
8430 s_fTested = true;
8431 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
8432 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
8433 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
8434 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
8435 }
8436#endif
8437
8438 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8439
8440 /** @todo PUSH CS on 8086, undefined on 80186. */
8441 IEMOP_HLP_MIN_286();
8442 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
8443}
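
/* Worked example of the dispatch above: for f3 0f bc the repeat prefix will
   already have set pVCpu->iem.s.idxPrefix to 2 (the 0xF3 column), so the
   lookup is g_apfnTwoByteMap[0xbc * 4 + 2] and yields iemOp_tzcnt_Gv_Ev,
   exactly what the VBOX_STRICT self-test at the top asserts. */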
8444
8445/** Opcode 0x10. */
8446FNIEMOP_DEF(iemOp_adc_Eb_Gb)
8447{
8448 IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
8449 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
8450}
8451
8452
8453/** Opcode 0x11. */
8454FNIEMOP_DEF(iemOp_adc_Ev_Gv)
8455{
8456 IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
8457 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
8458}
8459
8460
8461/** Opcode 0x12. */
8462FNIEMOP_DEF(iemOp_adc_Gb_Eb)
8463{
8464 IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
8465 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
8466}
8467
8468
8469/** Opcode 0x13. */
8470FNIEMOP_DEF(iemOp_adc_Gv_Ev)
8471{
8472 IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
8473 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
8474}
8475
8476
8477/** Opcode 0x14. */
8478FNIEMOP_DEF(iemOp_adc_Al_Ib)
8479{
8480 IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
8481 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
8482}
8483
8484
8485/** Opcode 0x15. */
8486FNIEMOP_DEF(iemOp_adc_eAX_Iz)
8487{
8488 IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
8489 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
8490}
8491
8492
8493/** Opcode 0x16. */
8494FNIEMOP_DEF(iemOp_push_SS)
8495{
8496 IEMOP_MNEMONIC(push_ss, "push ss");
8497 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
8498}
8499
8500
8501/** Opcode 0x17. */
8502FNIEMOP_DEF(iemOp_pop_SS)
8503{
8504 IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
8505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8506 IEMOP_HLP_NO_64BIT();
8507 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
8508}
8509
8510
8511/** Opcode 0x18. */
8512FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
8513{
8514 IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
8515 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
8516}
8517
8518
8519/** Opcode 0x19. */
8520FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
8521{
8522 IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
8523 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
8524}
8525
8526
8527/** Opcode 0x1a. */
8528FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
8529{
8530 IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
8531 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
8532}
8533
8534
8535/** Opcode 0x1b. */
8536FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
8537{
8538 IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
8539 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
8540}
8541
8542
8543/** Opcode 0x1c. */
8544FNIEMOP_DEF(iemOp_sbb_Al_Ib)
8545{
8546 IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
8547 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
8548}
8549
8550
8551/** Opcode 0x1d. */
8552FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
8553{
8554 IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
8555 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
8556}
8557
8558
8559/** Opcode 0x1e. */
8560FNIEMOP_DEF(iemOp_push_DS)
8561{
8562 IEMOP_MNEMONIC(push_ds, "push ds");
8563 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
8564}
8565
8566
8567/** Opcode 0x1f. */
8568FNIEMOP_DEF(iemOp_pop_DS)
8569{
8570 IEMOP_MNEMONIC(pop_ds, "pop ds");
8571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8572 IEMOP_HLP_NO_64BIT();
8573 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
8574}
8575
8576
8577/** Opcode 0x20. */
8578FNIEMOP_DEF(iemOp_and_Eb_Gb)
8579{
8580 IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
8581 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8582 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
8583}
8584
8585
8586/** Opcode 0x21. */
8587FNIEMOP_DEF(iemOp_and_Ev_Gv)
8588{
8589 IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
8590 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8591 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
8592}
8593
8594
8595/** Opcode 0x22. */
8596FNIEMOP_DEF(iemOp_and_Gb_Eb)
8597{
8598 IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
8599 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8600 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
8601}
8602
8603
8604/** Opcode 0x23. */
8605FNIEMOP_DEF(iemOp_and_Gv_Ev)
8606{
8607 IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
8608 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8609 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
8610}
8611
8612
8613/** Opcode 0x24. */
8614FNIEMOP_DEF(iemOp_and_Al_Ib)
8615{
8616 IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
8617 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8618 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
8619}
8620
8621
8622/** Opcode 0x25. */
8623FNIEMOP_DEF(iemOp_and_eAX_Iz)
8624{
8625 IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
8626 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8627 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
8628}
8629
8630
8631/** Opcode 0x26. */
8632FNIEMOP_DEF(iemOp_seg_ES)
8633{
8634 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
8635 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
8636 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
8637
8638 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8639 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8640}
8641
8642
8643/** Opcode 0x27. */
8644FNIEMOP_DEF(iemOp_daa)
8645{
8646 IEMOP_MNEMONIC(daa_AL, "daa AL");
8647 IEMOP_HLP_NO_64BIT();
8648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8649 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8650 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
8651}
8652
8653
8654/** Opcode 0x28. */
8655FNIEMOP_DEF(iemOp_sub_Eb_Gb)
8656{
8657 IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
8658 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
8659}
8660
8661
8662/** Opcode 0x29. */
8663FNIEMOP_DEF(iemOp_sub_Ev_Gv)
8664{
8665 IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
8666 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
8667}
8668
8669
8670/** Opcode 0x2a. */
8671FNIEMOP_DEF(iemOp_sub_Gb_Eb)
8672{
8673 IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
8674 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
8675}
8676
8677
8678/** Opcode 0x2b. */
8679FNIEMOP_DEF(iemOp_sub_Gv_Ev)
8680{
8681 IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
8682 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
8683}
8684
8685
8686/** Opcode 0x2c. */
8687FNIEMOP_DEF(iemOp_sub_Al_Ib)
8688{
8689 IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
8690 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
8691}
8692
8693
8694/** Opcode 0x2d. */
8695FNIEMOP_DEF(iemOp_sub_eAX_Iz)
8696{
8697 IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
8698 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
8699}
8700
8701
8702/** Opcode 0x2e. */
8703FNIEMOP_DEF(iemOp_seg_CS)
8704{
8705 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
8706 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
8707 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
8708
8709 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8710 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8711}
8712
8713
8714/** Opcode 0x2f. */
8715FNIEMOP_DEF(iemOp_das)
8716{
8717 IEMOP_MNEMONIC(das_AL, "das AL");
8718 IEMOP_HLP_NO_64BIT();
8719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8720 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8721 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
8722}
8723
8724
8725/** Opcode 0x30. */
8726FNIEMOP_DEF(iemOp_xor_Eb_Gb)
8727{
8728 IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
8729 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8730 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
8731}
8732
8733
8734/** Opcode 0x31. */
8735FNIEMOP_DEF(iemOp_xor_Ev_Gv)
8736{
8737 IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
8738 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8739 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
8740}
8741
8742
8743/** Opcode 0x32. */
8744FNIEMOP_DEF(iemOp_xor_Gb_Eb)
8745{
8746 IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
8747 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8748 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
8749}
8750
8751
8752/** Opcode 0x33. */
8753FNIEMOP_DEF(iemOp_xor_Gv_Ev)
8754{
8755 IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
8756 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8757 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
8758}
8759
8760
8761/** Opcode 0x34. */
8762FNIEMOP_DEF(iemOp_xor_Al_Ib)
8763{
8764 IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
8765 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8766 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
8767}
8768
8769
8770/** Opcode 0x35. */
8771FNIEMOP_DEF(iemOp_xor_eAX_Iz)
8772{
8773 IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
8774 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8775 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
8776}
8777
8778
8779/** Opcode 0x36. */
8780FNIEMOP_DEF(iemOp_seg_SS)
8781{
8782 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
8783 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
8784 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
8785
8786 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8787 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8788}
8789
8790
8791/** Opcode 0x37. */
8792FNIEMOP_STUB(iemOp_aaa);
8793
8794
8795/** Opcode 0x38. */
8796FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
8797{
8798 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
8799 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
8800}
8801
8802
8803/** Opcode 0x39. */
8804FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
8805{
8806 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
8807 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
8808}
8809
8810
8811/** Opcode 0x3a. */
8812FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
8813{
8814 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
8815 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
8816}
8817
8818
8819/** Opcode 0x3b. */
8820FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
8821{
8822 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
8823 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
8824}
8825
8826
8827/** Opcode 0x3c. */
8828FNIEMOP_DEF(iemOp_cmp_Al_Ib)
8829{
8830 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
8831 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
8832}
8833
8834
8835/** Opcode 0x3d. */
8836FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
8837{
8838 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
8839 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
8840}
8841
8842
8843/** Opcode 0x3e. */
8844FNIEMOP_DEF(iemOp_seg_DS)
8845{
8846 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
8847 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
8848 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
8849
8850 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8851 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8852}
8853
8854
8855/** Opcode 0x3f. */
8856FNIEMOP_STUB(iemOp_aas);
8857
8858/**
8859 * Common 'inc/dec/not/neg register' helper.
8860 */
8861FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
8862{
8863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8864 switch (pVCpu->iem.s.enmEffOpSize)
8865 {
8866 case IEMMODE_16BIT:
8867 IEM_MC_BEGIN(2, 0);
8868 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8869 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8870 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8871 IEM_MC_REF_EFLAGS(pEFlags);
8872 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
8873 IEM_MC_ADVANCE_RIP();
8874 IEM_MC_END();
8875 return VINF_SUCCESS;
8876
8877 case IEMMODE_32BIT:
8878 IEM_MC_BEGIN(2, 0);
8879 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8880 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8881 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8882 IEM_MC_REF_EFLAGS(pEFlags);
8883 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
8884 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8885 IEM_MC_ADVANCE_RIP();
8886 IEM_MC_END();
8887 return VINF_SUCCESS;
8888
8889 case IEMMODE_64BIT:
8890 IEM_MC_BEGIN(2, 0);
8891 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8892 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8893 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8894 IEM_MC_REF_EFLAGS(pEFlags);
8895 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
8896 IEM_MC_ADVANCE_RIP();
8897 IEM_MC_END();
8898 return VINF_SUCCESS;
8899 }
8900 return VINF_SUCCESS;
8901}
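
/* A sketch of the register write-back rules the helper above implements:
   in 64-bit mode a 32-bit destination write zero-extends into the whole
   register, while 8/16-bit writes leave the rest alone.  With
   rax = 0xffffffffffffffff:
        inc eax   ; rax = 0x0000000000000000 (upper half cleared)
        inc ax    ; rax = 0xffffffffffff0000 (only ax touched)
   hence only the IEMMODE_32BIT case needs
   IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF. */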
8902
8903
8904/** Opcode 0x40. */
8905FNIEMOP_DEF(iemOp_inc_eAX)
8906{
8907 /*
8908 * This is a REX prefix in 64-bit mode.
8909 */
8910 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8911 {
8912 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
8913 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
8914
8915 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8916 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8917 }
8918
8919 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
8920 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
8921}
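
/* The 0x40..0x4f bytes form the REX prefix family in 64-bit mode; the low
   nibble carries the four flag bits (0100wrxb):
        bit 3 (W) - 64-bit operand size, forces iemRecalEffOpSize
        bit 2 (R) - extends ModRM.reg        -> uRexReg
        bit 1 (X) - extends SIB.index        -> uRexIndex
        bit 0 (B) - extends ModRM.rm/base    -> uRexB
   so 0x41 below sets only uRexB, 0x44 only uRexReg, and 0x48..0x4f also
   recalculate the effective operand size for REX.W. */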
8922
8923
8924/** Opcode 0x41. */
8925FNIEMOP_DEF(iemOp_inc_eCX)
8926{
8927 /*
8928 * This is a REX prefix in 64-bit mode.
8929 */
8930 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8931 {
8932 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
8933 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
8934 pVCpu->iem.s.uRexB = 1 << 3;
8935
8936 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8937 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8938 }
8939
8940 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
8941 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
8942}
8943
8944
8945/** Opcode 0x42. */
8946FNIEMOP_DEF(iemOp_inc_eDX)
8947{
8948 /*
8949 * This is a REX prefix in 64-bit mode.
8950 */
8951 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8952 {
8953 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
8954 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
8955 pVCpu->iem.s.uRexIndex = 1 << 3;
8956
8957 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8958 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8959 }
8960
8961 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
8962 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
8963}
8964
8965
8966
8967/** Opcode 0x43. */
8968FNIEMOP_DEF(iemOp_inc_eBX)
8969{
8970 /*
8971 * This is a REX prefix in 64-bit mode.
8972 */
8973 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8974 {
8975 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
8976 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8977 pVCpu->iem.s.uRexB = 1 << 3;
8978 pVCpu->iem.s.uRexIndex = 1 << 3;
8979
8980 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8981 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8982 }
8983
8984 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
8985 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
8986}
8987
8988
8989/** Opcode 0x44. */
8990FNIEMOP_DEF(iemOp_inc_eSP)
8991{
8992 /*
8993 * This is a REX prefix in 64-bit mode.
8994 */
8995 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8996 {
8997 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
8998 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
8999 pVCpu->iem.s.uRexReg = 1 << 3;
9000
9001 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9002 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9003 }
9004
9005 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
9006 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
9007}
9008
9009
9010/** Opcode 0x45. */
9011FNIEMOP_DEF(iemOp_inc_eBP)
9012{
9013 /*
9014 * This is a REX prefix in 64-bit mode.
9015 */
9016 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9017 {
9018 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
9019 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
9020 pVCpu->iem.s.uRexReg = 1 << 3;
9021 pVCpu->iem.s.uRexB = 1 << 3;
9022
9023 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9024 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9025 }
9026
9027 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
9028 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
9029}
9030
9031
9032/** Opcode 0x46. */
9033FNIEMOP_DEF(iemOp_inc_eSI)
9034{
9035 /*
9036 * This is a REX prefix in 64-bit mode.
9037 */
9038 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9039 {
9040 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
9041 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
9042 pVCpu->iem.s.uRexReg = 1 << 3;
9043 pVCpu->iem.s.uRexIndex = 1 << 3;
9044
9045 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9046 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9047 }
9048
9049 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
9050 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
9051}
9052
9053
9054/** Opcode 0x47. */
9055FNIEMOP_DEF(iemOp_inc_eDI)
9056{
9057 /*
9058 * This is a REX prefix in 64-bit mode.
9059 */
9060 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9061 {
9062 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
9063 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
9064 pVCpu->iem.s.uRexReg = 1 << 3;
9065 pVCpu->iem.s.uRexB = 1 << 3;
9066 pVCpu->iem.s.uRexIndex = 1 << 3;
9067
9068 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9069 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9070 }
9071
9072 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
9073 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
9074}
9075
9076
9077/** Opcode 0x48. */
9078FNIEMOP_DEF(iemOp_dec_eAX)
9079{
9080 /*
9081 * This is a REX prefix in 64-bit mode.
9082 */
9083 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9084 {
9085 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
9086 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
9087 iemRecalEffOpSize(pVCpu);
9088
9089 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9090 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9091 }
9092
9093 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
9094 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
9095}
9096
9097
9098/** Opcode 0x49. */
9099FNIEMOP_DEF(iemOp_dec_eCX)
9100{
9101 /*
9102 * This is a REX prefix in 64-bit mode.
9103 */
9104 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9105 {
9106 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
9107 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
9108 pVCpu->iem.s.uRexB = 1 << 3;
9109 iemRecalEffOpSize(pVCpu);
9110
9111 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9112 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9113 }
9114
9115 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
9116 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
9117}
9118
9119
9120/** Opcode 0x4a. */
9121FNIEMOP_DEF(iemOp_dec_eDX)
9122{
9123 /*
9124 * This is a REX prefix in 64-bit mode.
9125 */
9126 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9127 {
9128 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
9129 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9130 pVCpu->iem.s.uRexIndex = 1 << 3;
9131 iemRecalEffOpSize(pVCpu);
9132
9133 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9134 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9135 }
9136
9137 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
9138 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
9139}
9140
9141
9142/** Opcode 0x4b. */
9143FNIEMOP_DEF(iemOp_dec_eBX)
9144{
9145 /*
9146 * This is a REX prefix in 64-bit mode.
9147 */
9148 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9149 {
9150 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
9151 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9152 pVCpu->iem.s.uRexB = 1 << 3;
9153 pVCpu->iem.s.uRexIndex = 1 << 3;
9154 iemRecalEffOpSize(pVCpu);
9155
9156 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9157 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9158 }
9159
9160 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
9161 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
9162}
9163
9164
9165/** Opcode 0x4c. */
9166FNIEMOP_DEF(iemOp_dec_eSP)
9167{
9168 /*
9169 * This is a REX prefix in 64-bit mode.
9170 */
9171 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9172 {
9173 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
9174 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
9175 pVCpu->iem.s.uRexReg = 1 << 3;
9176 iemRecalEffOpSize(pVCpu);
9177
9178 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9179 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9180 }
9181
9182 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
9183 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
9184}
9185
9186
9187/** Opcode 0x4d. */
9188FNIEMOP_DEF(iemOp_dec_eBP)
9189{
9190 /*
9191 * This is a REX prefix in 64-bit mode.
9192 */
9193 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9194 {
9195 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
9196 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
9197 pVCpu->iem.s.uRexReg = 1 << 3;
9198 pVCpu->iem.s.uRexB = 1 << 3;
9199 iemRecalEffOpSize(pVCpu);
9200
9201 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9202 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9203 }
9204
9205 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
9206 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
9207}
9208
9209
9210/** Opcode 0x4e. */
9211FNIEMOP_DEF(iemOp_dec_eSI)
9212{
9213 /*
9214 * This is a REX prefix in 64-bit mode.
9215 */
9216 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9217 {
9218 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
9219 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9220 pVCpu->iem.s.uRexReg = 1 << 3;
9221 pVCpu->iem.s.uRexIndex = 1 << 3;
9222 iemRecalEffOpSize(pVCpu);
9223
9224 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9225 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9226 }
9227
9228 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
9229 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
9230}
9231
9232
9233/** Opcode 0x4f. */
9234FNIEMOP_DEF(iemOp_dec_eDI)
9235{
9236 /*
9237 * This is a REX prefix in 64-bit mode.
9238 */
9239 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9240 {
9241 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
9242 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9243 pVCpu->iem.s.uRexReg = 1 << 3;
9244 pVCpu->iem.s.uRexB = 1 << 3;
9245 pVCpu->iem.s.uRexIndex = 1 << 3;
9246 iemRecalEffOpSize(pVCpu);
9247
9248 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9249 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9250 }
9251
9252 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
9253 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
9254}
9255
9256
9257/**
9258 * Common 'push register' helper.
9259 */
9260FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
9261{
9262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9263 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9264 {
9265 iReg |= pVCpu->iem.s.uRexB;
9266 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9267 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9268 }
9269
9270 switch (pVCpu->iem.s.enmEffOpSize)
9271 {
9272 case IEMMODE_16BIT:
9273 IEM_MC_BEGIN(0, 1);
9274 IEM_MC_LOCAL(uint16_t, u16Value);
9275 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
9276 IEM_MC_PUSH_U16(u16Value);
9277 IEM_MC_ADVANCE_RIP();
9278 IEM_MC_END();
9279 break;
9280
9281 case IEMMODE_32BIT:
9282 IEM_MC_BEGIN(0, 1);
9283 IEM_MC_LOCAL(uint32_t, u32Value);
9284 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
9285 IEM_MC_PUSH_U32(u32Value);
9286 IEM_MC_ADVANCE_RIP();
9287 IEM_MC_END();
9288 break;
9289
9290 case IEMMODE_64BIT:
9291 IEM_MC_BEGIN(0, 1);
9292 IEM_MC_LOCAL(uint64_t, u64Value);
9293 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
9294 IEM_MC_PUSH_U64(u64Value);
9295 IEM_MC_ADVANCE_RIP();
9296 IEM_MC_END();
9297 break;
9298 }
9299
9300 return VINF_SUCCESS;
9301}
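
/* A sketch of the operand-size rule encoded above: in 64-bit mode PUSH
   defaults to a 64-bit operand and cannot be forced down to 32 bits; the
   only override is the 0x66 prefix, which selects a 16-bit push:
        50          push rax    ; 8 bytes
        66 50       push ax     ; 2 bytes
   (no encoding pushes 4 bytes in 64-bit mode). */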
9302
9303
9304/** Opcode 0x50. */
9305FNIEMOP_DEF(iemOp_push_eAX)
9306{
9307 IEMOP_MNEMONIC(push_rAX, "push rAX");
9308 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
9309}
9310
9311
9312/** Opcode 0x51. */
9313FNIEMOP_DEF(iemOp_push_eCX)
9314{
9315 IEMOP_MNEMONIC(push_rCX, "push rCX");
9316 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
9317}
9318
9319
9320/** Opcode 0x52. */
9321FNIEMOP_DEF(iemOp_push_eDX)
9322{
9323 IEMOP_MNEMONIC(push_rDX, "push rDX");
9324 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
9325}
9326
9327
9328/** Opcode 0x53. */
9329FNIEMOP_DEF(iemOp_push_eBX)
9330{
9331 IEMOP_MNEMONIC(push_rBX, "push rBX");
9332 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
9333}
9334
9335
9336/** Opcode 0x54. */
9337FNIEMOP_DEF(iemOp_push_eSP)
9338{
9339 IEMOP_MNEMONIC(push_rSP, "push rSP");
9340 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
9341 {
9342 IEM_MC_BEGIN(0, 1);
9343 IEM_MC_LOCAL(uint16_t, u16Value);
9344 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
9345 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
9346 IEM_MC_PUSH_U16(u16Value);
9347 IEM_MC_ADVANCE_RIP();
9348 IEM_MC_END();
9349 }
9350 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
9351}
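
/* Worked example of the 8086 quirk above: the original 8086 stores the
   already decremented SP, so with sp = 0x0100, 'push sp' writes 0x00fe to
   ss:0x00fe.  Every later CPU stores the pre-decrement value (0x0100),
   which is what the fall through to iemOpCommonPushGReg implements. */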
9352
9353
9354/** Opcode 0x55. */
9355FNIEMOP_DEF(iemOp_push_eBP)
9356{
9357 IEMOP_MNEMONIC(push_rBP, "push rBP");
9358 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
9359}
9360
9361
9362/** Opcode 0x56. */
9363FNIEMOP_DEF(iemOp_push_eSI)
9364{
9365 IEMOP_MNEMONIC(push_rSI, "push rSI");
9366 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
9367}
9368
9369
9370/** Opcode 0x57. */
9371FNIEMOP_DEF(iemOp_push_eDI)
9372{
9373 IEMOP_MNEMONIC(push_rDI, "push rDI");
9374 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
9375}
9376
9377
9378/**
9379 * Common 'pop register' helper.
9380 */
9381FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
9382{
9383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9384 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9385 {
9386 iReg |= pVCpu->iem.s.uRexB;
9387 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9388 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9389 }
9390
9391 switch (pVCpu->iem.s.enmEffOpSize)
9392 {
9393 case IEMMODE_16BIT:
9394 IEM_MC_BEGIN(0, 1);
9395 IEM_MC_LOCAL(uint16_t *, pu16Dst);
9396 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
9397 IEM_MC_POP_U16(pu16Dst);
9398 IEM_MC_ADVANCE_RIP();
9399 IEM_MC_END();
9400 break;
9401
9402 case IEMMODE_32BIT:
9403 IEM_MC_BEGIN(0, 1);
9404 IEM_MC_LOCAL(uint32_t *, pu32Dst);
9405 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9406 IEM_MC_POP_U32(pu32Dst);
9407 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase */
9408 IEM_MC_ADVANCE_RIP();
9409 IEM_MC_END();
9410 break;
9411
9412 case IEMMODE_64BIT:
9413 IEM_MC_BEGIN(0, 1);
9414 IEM_MC_LOCAL(uint64_t *, pu64Dst);
9415 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9416 IEM_MC_POP_U64(pu64Dst);
9417 IEM_MC_ADVANCE_RIP();
9418 IEM_MC_END();
9419 break;
9420 }
9421
9422 return VINF_SUCCESS;
9423}
9424
9425
9426/** Opcode 0x58. */
9427FNIEMOP_DEF(iemOp_pop_eAX)
9428{
9429 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
9430 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
9431}
9432
9433
9434/** Opcode 0x59. */
9435FNIEMOP_DEF(iemOp_pop_eCX)
9436{
9437 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
9438 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
9439}
9440
9441
9442/** Opcode 0x5a. */
9443FNIEMOP_DEF(iemOp_pop_eDX)
9444{
9445 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
9446 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
9447}
9448
9449
9450/** Opcode 0x5b. */
9451FNIEMOP_DEF(iemOp_pop_eBX)
9452{
9453 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
9454 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
9455}
9456
9457
9458/** Opcode 0x5c. */
9459FNIEMOP_DEF(iemOp_pop_eSP)
9460{
9461 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
9462 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9463 {
9464 if (pVCpu->iem.s.uRexB)
9465 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
9466 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9467 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9468 }
9469
9470 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
9471 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
9472 /** @todo add testcase for this instruction. */
9473 switch (pVCpu->iem.s.enmEffOpSize)
9474 {
9475 case IEMMODE_16BIT:
9476 IEM_MC_BEGIN(0, 1);
9477 IEM_MC_LOCAL(uint16_t, u16Dst);
9478 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
9479 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
9480 IEM_MC_ADVANCE_RIP();
9481 IEM_MC_END();
9482 break;
9483
9484 case IEMMODE_32BIT:
9485 IEM_MC_BEGIN(0, 1);
9486 IEM_MC_LOCAL(uint32_t, u32Dst);
9487 IEM_MC_POP_U32(&u32Dst);
9488 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
9489 IEM_MC_ADVANCE_RIP();
9490 IEM_MC_END();
9491 break;
9492
9493 case IEMMODE_64BIT:
9494 IEM_MC_BEGIN(0, 1);
9495 IEM_MC_LOCAL(uint64_t, u64Dst);
9496 IEM_MC_POP_U64(&u64Dst);
9497 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
9498 IEM_MC_ADVANCE_RIP();
9499 IEM_MC_END();
9500 break;
9501 }
9502
9503 return VINF_SUCCESS;
9504}
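
/* A sketch of the POP rSP semantics implemented above: the value is read
   from ss:[rSP] and the stack pointer incremented first, then the loaded
   value replaces rSP.  E.g. in 64-bit mode with rsp = 0x1000 and
   qword [0x1000] = 0x2000, 'pop rsp' leaves rsp = 0x2000, not 0x1008. */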
9505
9506
9507/** Opcode 0x5d. */
9508FNIEMOP_DEF(iemOp_pop_eBP)
9509{
9510 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
9511 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
9512}
9513
9514
9515/** Opcode 0x5e. */
9516FNIEMOP_DEF(iemOp_pop_eSI)
9517{
9518 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
9519 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
9520}
9521
9522
9523/** Opcode 0x5f. */
9524FNIEMOP_DEF(iemOp_pop_eDI)
9525{
9526 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
9527 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
9528}
9529
9530
9531/** Opcode 0x60. */
9532FNIEMOP_DEF(iemOp_pusha)
9533{
9534 IEMOP_MNEMONIC(pusha, "pusha");
9535 IEMOP_HLP_MIN_186();
9536 IEMOP_HLP_NO_64BIT();
9537 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
9538 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
9539 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
9540 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
9541}
9542
9543
9544/** Opcode 0x61. */
9545FNIEMOP_DEF(iemOp_popa)
9546{
9547 IEMOP_MNEMONIC(popa, "popa");
9548 IEMOP_HLP_MIN_186();
9549 IEMOP_HLP_NO_64BIT();
9550 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
9551 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
9552 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
9553 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
9554}
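
/* Semantics recap for the two workers deferred to above: PUSHA pushes AX,
   CX, DX, BX, the original SP, BP, SI and DI in that order (eight words
   or dwords), and POPA pops them in reverse, discarding the stored SP
   value rather than loading it. */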
9555
9556
9557/** Opcode 0x62. */
9558FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
9559// IEMOP_HLP_MIN_186();
9560
9561
9562/** Opcode 0x63 - non-64-bit modes. */
9563FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
9564{
9565 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
9566 IEMOP_HLP_MIN_286();
9567 IEMOP_HLP_NO_REAL_OR_V86_MODE();
9568 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9569
9570 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9571 {
9572 /* Register */
9573 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
9574 IEM_MC_BEGIN(3, 0);
9575 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9576 IEM_MC_ARG(uint16_t, u16Src, 1);
9577 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9578
9579 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9580 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
9581 IEM_MC_REF_EFLAGS(pEFlags);
9582 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
9583
9584 IEM_MC_ADVANCE_RIP();
9585 IEM_MC_END();
9586 }
9587 else
9588 {
9589 /* Memory */
9590 IEM_MC_BEGIN(3, 2);
9591 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9592 IEM_MC_ARG(uint16_t, u16Src, 1);
9593 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9595
9596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9597 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
9598 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9599 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9600 IEM_MC_FETCH_EFLAGS(EFlags);
9601 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
9602
9603 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9604 IEM_MC_COMMIT_EFLAGS(EFlags);
9605 IEM_MC_ADVANCE_RIP();
9606 IEM_MC_END();
9607 }
9608 return VINF_SUCCESS;
9609
9610}
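
/* A sketch of the ARPL semantics behind iemAImpl_arpl: if the RPL (the
   low two bits) of the destination selector is below that of the source,
   the destination RPL is raised to match and ZF is set; otherwise ZF is
   cleared and the destination is left untouched:
        dst = 0x0008 (RPL 0), src = 0x0003 (RPL 3) -> dst = 0x000b, ZF = 1
        dst = 0x000b (RPL 3), src = 0x0008 (RPL 0) -> unchanged,    ZF = 0 */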
9611
9612
9613/** Opcode 0x63.
9614 * @note This is a weird one. It works like a regular move instruction if
9615 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
9616 * @todo This definitely needs a testcase to verify the odd cases. */
9617FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
9618{
9619 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
9620
9621 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
9622 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9623
9624 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9625 {
9626 /*
9627 * Register to register.
9628 */
9629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9630 IEM_MC_BEGIN(0, 1);
9631 IEM_MC_LOCAL(uint64_t, u64Value);
9632 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9633 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9634 IEM_MC_ADVANCE_RIP();
9635 IEM_MC_END();
9636 }
9637 else
9638 {
9639 /*
9640 * We're loading a register from memory.
9641 */
9642 IEM_MC_BEGIN(0, 2);
9643 IEM_MC_LOCAL(uint64_t, u64Value);
9644 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9647 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9648 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9649 IEM_MC_ADVANCE_RIP();
9650 IEM_MC_END();
9651 }
9652 return VINF_SUCCESS;
9653}
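
/* Worked example for the REX.W form handled above: MOVSXD sign-extends the
   32-bit source into the 64-bit destination, so with ecx = 0x80000000,
   'movsxd rax, ecx' yields rax = 0xffffffff80000000. */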
9654
9655
9656/** Opcode 0x64. */
9657FNIEMOP_DEF(iemOp_seg_FS)
9658{
9659 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
9660 IEMOP_HLP_MIN_386();
9661
9662 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
9663 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
9664
9665 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9666 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9667}
9668
9669
9670/** Opcode 0x65. */
9671FNIEMOP_DEF(iemOp_seg_GS)
9672{
9673 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
9674 IEMOP_HLP_MIN_386();
9675
9676 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
9677 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
9678
9679 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9680 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9681}
9682
9683
9684/** Opcode 0x66. */
9685FNIEMOP_DEF(iemOp_op_size)
9686{
9687 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
9688 IEMOP_HLP_MIN_386();
9689
9690 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
9691 iemRecalEffOpSize(pVCpu);
9692
9693 /* For the 4 entry opcode tables, the operand-size prefix doesn't count
9694 when REPZ or REPNZ are present. */
9695 if (pVCpu->iem.s.idxPrefix == 0)
9696 pVCpu->iem.s.idxPrefix = 1;
9697
9698 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9699 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9700}
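
/* A sketch of the idxPrefix bookkeeping above: the four-entry opcode table
   columns are indexed 0 = no prefix, 1 = 0x66, 2 = 0xF3, 3 = 0xF2, so a
   repeat prefix wins over 0x66.  E.g. for f3 66 0f bc the repeat handler
   has already set idxPrefix to 2, this handler leaves it alone, and the
   two-byte lookup still lands in the tzcnt column. */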
9701
9702
9703/** Opcode 0x67. */
9704FNIEMOP_DEF(iemOp_addr_size)
9705{
9706 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
9707 IEMOP_HLP_MIN_386();
9708
9709 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
9710 switch (pVCpu->iem.s.enmDefAddrMode)
9711 {
9712 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9713 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
9714 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9715 default: AssertFailed();
9716 }
9717
9718 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9719 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9720}
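
/* The switch above encodes the 0x67 override: 16-bit code gets 32-bit
   addressing, 32-bit code gets 16-bit addressing, and 64-bit code drops to
   32-bit addressing (long mode has no 16-bit addressing).  E.g. in 64-bit
   mode '67 8b 00' is 'mov eax, [eax]' instead of 'mov eax, [rax]'. */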
9721
9722
9723/** Opcode 0x68. */
9724FNIEMOP_DEF(iemOp_push_Iz)
9725{
9726 IEMOP_MNEMONIC(push_Iz, "push Iz");
9727 IEMOP_HLP_MIN_186();
9728 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9729 switch (pVCpu->iem.s.enmEffOpSize)
9730 {
9731 case IEMMODE_16BIT:
9732 {
9733 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9735 IEM_MC_BEGIN(0,0);
9736 IEM_MC_PUSH_U16(u16Imm);
9737 IEM_MC_ADVANCE_RIP();
9738 IEM_MC_END();
9739 return VINF_SUCCESS;
9740 }
9741
9742 case IEMMODE_32BIT:
9743 {
9744 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9746 IEM_MC_BEGIN(0,0);
9747 IEM_MC_PUSH_U32(u32Imm);
9748 IEM_MC_ADVANCE_RIP();
9749 IEM_MC_END();
9750 return VINF_SUCCESS;
9751 }
9752
9753 case IEMMODE_64BIT:
9754 {
9755 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9757 IEM_MC_BEGIN(0,0);
9758 IEM_MC_PUSH_U64(u64Imm);
9759 IEM_MC_ADVANCE_RIP();
9760 IEM_MC_END();
9761 return VINF_SUCCESS;
9762 }
9763
9764 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9765 }
9766}
9767
9768
9769/** Opcode 0x69. */
9770FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
9771{
9772 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
9773 IEMOP_HLP_MIN_186();
9774 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9775 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9776
9777 switch (pVCpu->iem.s.enmEffOpSize)
9778 {
9779 case IEMMODE_16BIT:
9780 {
9781 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9782 {
9783 /* register operand */
9784 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9786
9787 IEM_MC_BEGIN(3, 1);
9788 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9789 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
9790 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9791 IEM_MC_LOCAL(uint16_t, u16Tmp);
9792
9793 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9794 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9795 IEM_MC_REF_EFLAGS(pEFlags);
9796 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9797 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9798
9799 IEM_MC_ADVANCE_RIP();
9800 IEM_MC_END();
9801 }
9802 else
9803 {
9804 /* memory operand */
9805 IEM_MC_BEGIN(3, 2);
9806 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9807 IEM_MC_ARG(uint16_t, u16Src, 1);
9808 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9809 IEM_MC_LOCAL(uint16_t, u16Tmp);
9810 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9811
9812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9813 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9814 IEM_MC_ASSIGN(u16Src, u16Imm);
9815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9816 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9817 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9818 IEM_MC_REF_EFLAGS(pEFlags);
9819 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9820 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9821
9822 IEM_MC_ADVANCE_RIP();
9823 IEM_MC_END();
9824 }
9825 return VINF_SUCCESS;
9826 }
9827
9828 case IEMMODE_32BIT:
9829 {
9830 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9831 {
9832 /* register operand */
9833 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9835
9836 IEM_MC_BEGIN(3, 1);
9837 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9838 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
9839 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9840 IEM_MC_LOCAL(uint32_t, u32Tmp);
9841
9842 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9843 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9844 IEM_MC_REF_EFLAGS(pEFlags);
9845 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9846 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9847
9848 IEM_MC_ADVANCE_RIP();
9849 IEM_MC_END();
9850 }
9851 else
9852 {
9853 /* memory operand */
9854 IEM_MC_BEGIN(3, 2);
9855 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9856 IEM_MC_ARG(uint32_t, u32Src, 1);
9857 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9858 IEM_MC_LOCAL(uint32_t, u32Tmp);
9859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9860
9861 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9862 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9863 IEM_MC_ASSIGN(u32Src, u32Imm);
9864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9865 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9866 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9867 IEM_MC_REF_EFLAGS(pEFlags);
9868 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9869 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9870
9871 IEM_MC_ADVANCE_RIP();
9872 IEM_MC_END();
9873 }
9874 return VINF_SUCCESS;
9875 }
9876
9877 case IEMMODE_64BIT:
9878 {
9879 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9880 {
9881 /* register operand */
9882 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9884
9885 IEM_MC_BEGIN(3, 1);
9886 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9887 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
9888 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9889 IEM_MC_LOCAL(uint64_t, u64Tmp);
9890
9891 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9892 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9893 IEM_MC_REF_EFLAGS(pEFlags);
9894 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9895 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9896
9897 IEM_MC_ADVANCE_RIP();
9898 IEM_MC_END();
9899 }
9900 else
9901 {
9902 /* memory operand */
9903 IEM_MC_BEGIN(3, 2);
9904 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9905 IEM_MC_ARG(uint64_t, u64Src, 1);
9906 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9907 IEM_MC_LOCAL(uint64_t, u64Tmp);
9908 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9909
9910 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9911 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9912 IEM_MC_ASSIGN(u64Src, u64Imm);
9913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9914 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9915 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9916 IEM_MC_REF_EFLAGS(pEFlags);
9917 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9918 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9919
9920 IEM_MC_ADVANCE_RIP();
9921 IEM_MC_END();
9922 }
9923 return VINF_SUCCESS;
9924 }
9925 }
9926 AssertFailedReturn(VERR_IEM_IPE_9);
9927}
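
/* A worked example for the 64-bit path above: the immediate is fetched as
   a sign-extended 32-bit value (IEM_OPCODE_GET_NEXT_S32_SX_U64), so
   '48 69 c3 ff ff ff ff' decodes as 'imul rax, rbx, -1'; with rbx = 5 it
   yields rax = 0xfffffffffffffffb and CF = OF = 0 since the full signed
   product still fits in 64 bits. */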
9928
9929
9930/** Opcode 0x6a. */
9931FNIEMOP_DEF(iemOp_push_Ib)
9932{
9933 IEMOP_MNEMONIC(push_Ib, "push Ib");
9934 IEMOP_HLP_MIN_186();
9935 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9937 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9938
9939 IEM_MC_BEGIN(0,0);
9940 switch (pVCpu->iem.s.enmEffOpSize)
9941 {
9942 case IEMMODE_16BIT:
9943 IEM_MC_PUSH_U16(i8Imm);
9944 break;
9945 case IEMMODE_32BIT:
9946 IEM_MC_PUSH_U32(i8Imm);
9947 break;
9948 case IEMMODE_64BIT:
9949 IEM_MC_PUSH_U64(i8Imm);
9950 break;
9951 }
9952 IEM_MC_ADVANCE_RIP();
9953 IEM_MC_END();
9954 return VINF_SUCCESS;
9955}
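
/* A sketch of the sign extension above: the immediate is fetched as int8_t
   and widened by the push, so '6a ff' pushes 0xffff, 0xffffffff or
   0xffffffffffffffff depending on the effective operand size. */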
9956
9957
9958/** Opcode 0x6b. */
9959FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
9960{
9961 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
9962 IEMOP_HLP_MIN_186();
9963 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9964 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9965
9966 switch (pVCpu->iem.s.enmEffOpSize)
9967 {
9968 case IEMMODE_16BIT:
9969 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9970 {
9971 /* register operand */
9972 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9974
9975 IEM_MC_BEGIN(3, 1);
9976 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9977 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
9978 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9979 IEM_MC_LOCAL(uint16_t, u16Tmp);
9980
9981 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9982 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9983 IEM_MC_REF_EFLAGS(pEFlags);
9984 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9985 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9986
9987 IEM_MC_ADVANCE_RIP();
9988 IEM_MC_END();
9989 }
9990 else
9991 {
9992 /* memory operand */
9993 IEM_MC_BEGIN(3, 2);
9994 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9995 IEM_MC_ARG(uint16_t, u16Src, 1);
9996 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9997 IEM_MC_LOCAL(uint16_t, u16Tmp);
9998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9999
10000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10001 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
10002 IEM_MC_ASSIGN(u16Src, u16Imm);
10003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10004 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10005 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
10006 IEM_MC_REF_EFLAGS(pEFlags);
10007 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
10008 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
10009
10010 IEM_MC_ADVANCE_RIP();
10011 IEM_MC_END();
10012 }
10013 return VINF_SUCCESS;
10014
10015 case IEMMODE_32BIT:
10016 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10017 {
10018 /* register operand */
10019 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10021
10022 IEM_MC_BEGIN(3, 1);
10023 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10024 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
10025 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10026 IEM_MC_LOCAL(uint32_t, u32Tmp);
10027
10028 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10029 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
10030 IEM_MC_REF_EFLAGS(pEFlags);
10031 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
10032 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
10033
10034 IEM_MC_ADVANCE_RIP();
10035 IEM_MC_END();
10036 }
10037 else
10038 {
10039 /* memory operand */
10040 IEM_MC_BEGIN(3, 2);
10041 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10042 IEM_MC_ARG(uint32_t, u32Src, 1);
10043 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10044 IEM_MC_LOCAL(uint32_t, u32Tmp);
10045 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10046
10047 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10048 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
10049 IEM_MC_ASSIGN(u32Src, u32Imm);
10050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10051 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10052 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
10053 IEM_MC_REF_EFLAGS(pEFlags);
10054 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
10055 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
10056
10057 IEM_MC_ADVANCE_RIP();
10058 IEM_MC_END();
10059 }
10060 return VINF_SUCCESS;
10061
10062 case IEMMODE_64BIT:
10063 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10064 {
10065 /* register operand */
10066 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10068
10069 IEM_MC_BEGIN(3, 1);
10070 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10071 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
10072 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10073 IEM_MC_LOCAL(uint64_t, u64Tmp);
10074
10075 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10076 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10077 IEM_MC_REF_EFLAGS(pEFlags);
10078 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10079 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10080
10081 IEM_MC_ADVANCE_RIP();
10082 IEM_MC_END();
10083 }
10084 else
10085 {
10086 /* memory operand */
10087 IEM_MC_BEGIN(3, 2);
10088 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10089 IEM_MC_ARG(uint64_t, u64Src, 1);
10090 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10091 IEM_MC_LOCAL(uint64_t, u64Tmp);
10092 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10093
10094 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10095 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
10096 IEM_MC_ASSIGN(u64Src, u64Imm);
10097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10098 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10099 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10100 IEM_MC_REF_EFLAGS(pEFlags);
10101 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10102 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10103
10104 IEM_MC_ADVANCE_RIP();
10105 IEM_MC_END();
10106 }
10107 return VINF_SUCCESS;
10108 }
10109 AssertFailedReturn(VERR_IEM_IPE_8);
10110}
10111
10112
10113/** Opcode 0x6c. */
10114FNIEMOP_DEF(iemOp_insb_Yb_DX)
10115{
10116 IEMOP_HLP_MIN_186();
10117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10118 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10119 {
10120 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
10121 switch (pVCpu->iem.s.enmEffAddrMode)
10122 {
10123 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
10124 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
10125 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
10126 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10127 }
10128 }
10129 else
10130 {
10131 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
10132 switch (pVCpu->iem.s.enmEffAddrMode)
10133 {
10134 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
10135 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
10136 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
10137 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10138 }
10139 }
10140}
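
/* Semantics recap for the workers deferred to above: INSB reads a byte
   from the port in DX and stores it at es:[rDI] (the segment is fixed,
   not overridable), then steps rDI by 1 according to EFLAGS.DF; the REP
   forms repeat rCX times. */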
10141
10142
10143/** Opcode 0x6d. */
10144FNIEMOP_DEF(iemOp_inswd_Yv_DX)
10145{
10146 IEMOP_HLP_MIN_186();
10147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10148 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
10149 {
10150 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
10151 switch (pVCpu->iem.s.enmEffOpSize)
10152 {
10153 case IEMMODE_16BIT:
10154 switch (pVCpu->iem.s.enmEffAddrMode)
10155 {
10156 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
10157 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
10158 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
10159 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10160 }
10161 break;
10162 case IEMMODE_64BIT:
10163 case IEMMODE_32BIT:
10164 switch (pVCpu->iem.s.enmEffAddrMode)
10165 {
10166 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
10167 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
10168 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
10169 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10170 }
10171 break;
10172 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10173 }
10174 }
10175 else
10176 {
10177 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
10178 switch (pVCpu->iem.s.enmEffOpSize)
10179 {
10180 case IEMMODE_16BIT:
10181 switch (pVCpu->iem.s.enmEffAddrMode)
10182 {
10183 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
10184 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
10185 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
10186 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10187 }
10188 break;
10189 case IEMMODE_64BIT:
10190 case IEMMODE_32BIT:
10191 switch (pVCpu->iem.s.enmEffAddrMode)
10192 {
10193 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
10194 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
10195 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
10196 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10197 }
10198 break;
10199 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10200 }
10201 }
10202}
10203
10204
10205/** Opcode 0x6e. */
10206FNIEMOP_DEF(iemOp_outsb_Yb_DX)
10207{
10208 IEMOP_HLP_MIN_186();
10209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10210 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10211 {
10212 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
10213 switch (pVCpu->iem.s.enmEffAddrMode)
10214 {
10215 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
10216 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
10217 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
10218 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10219 }
10220 }
10221 else
10222 {
10223 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
10224 switch (pVCpu->iem.s.enmEffAddrMode)
10225 {
10226 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
10227 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
10228 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
10229 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10230 }
10231 }
10232}
10233
10234
10235/** Opcode 0x6f. */
10236FNIEMOP_DEF(iemOp_outswd_Yv_DX)
10237{
10238 IEMOP_HLP_MIN_186();
10239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10240 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
10241 {
10242 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
10243 switch (pVCpu->iem.s.enmEffOpSize)
10244 {
10245 case IEMMODE_16BIT:
10246 switch (pVCpu->iem.s.enmEffAddrMode)
10247 {
10248 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
10249 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
10250 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
10251 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10252 }
10253 break;
10254 case IEMMODE_64BIT:
10255 case IEMMODE_32BIT:
10256 switch (pVCpu->iem.s.enmEffAddrMode)
10257 {
10258 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
10259 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
10260 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
10261 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10262 }
10263 break;
10264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10265 }
10266 }
10267 else
10268 {
10269 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
10270 switch (pVCpu->iem.s.enmEffOpSize)
10271 {
10272 case IEMMODE_16BIT:
10273 switch (pVCpu->iem.s.enmEffAddrMode)
10274 {
10275 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
10276 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
10277 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
10278 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10279 }
10280 break;
10281 case IEMMODE_64BIT:
10282 case IEMMODE_32BIT:
10283 switch (pVCpu->iem.s.enmEffAddrMode)
10284 {
10285 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
10286 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
10287 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
10288 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10289 }
10290 break;
10291 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10292 }
10293 }
10294}
10295
10296
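/*
 * Opcodes 0x70 thru 0x7f are the short (rel8) conditional jumps. They all
 * follow the same pattern: fetch the signed 8-bit displacement, test the
 * relevant EFLAGS condition, and either take the relative jump or simply
 * advance RIP past the instruction.
 */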
10297/** Opcode 0x70. */
10298FNIEMOP_DEF(iemOp_jo_Jb)
10299{
10300 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
10301 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10303 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10304
10305 IEM_MC_BEGIN(0, 0);
10306 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
10307 IEM_MC_REL_JMP_S8(i8Imm);
10308 } IEM_MC_ELSE() {
10309 IEM_MC_ADVANCE_RIP();
10310 } IEM_MC_ENDIF();
10311 IEM_MC_END();
10312 return VINF_SUCCESS;
10313}
10314
10315
10316/** Opcode 0x71. */
10317FNIEMOP_DEF(iemOp_jno_Jb)
10318{
10319 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
10320 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10322 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10323
10324 IEM_MC_BEGIN(0, 0);
10325 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
10326 IEM_MC_ADVANCE_RIP();
10327 } IEM_MC_ELSE() {
10328 IEM_MC_REL_JMP_S8(i8Imm);
10329 } IEM_MC_ENDIF();
10330 IEM_MC_END();
10331 return VINF_SUCCESS;
10332}
10333
10334/** Opcode 0x72. */
10335FNIEMOP_DEF(iemOp_jc_Jb)
10336{
10337 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
10338 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10340 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10341
10342 IEM_MC_BEGIN(0, 0);
10343 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10344 IEM_MC_REL_JMP_S8(i8Imm);
10345 } IEM_MC_ELSE() {
10346 IEM_MC_ADVANCE_RIP();
10347 } IEM_MC_ENDIF();
10348 IEM_MC_END();
10349 return VINF_SUCCESS;
10350}
10351
10352
10353/** Opcode 0x73. */
10354FNIEMOP_DEF(iemOp_jnc_Jb)
10355{
10356 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
10357 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10359 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10360
10361 IEM_MC_BEGIN(0, 0);
10362 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10363 IEM_MC_ADVANCE_RIP();
10364 } IEM_MC_ELSE() {
10365 IEM_MC_REL_JMP_S8(i8Imm);
10366 } IEM_MC_ENDIF();
10367 IEM_MC_END();
10368 return VINF_SUCCESS;
10369}
10370
10371
10372/** Opcode 0x74. */
10373FNIEMOP_DEF(iemOp_je_Jb)
10374{
10375 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
10376 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10378 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10379
10380 IEM_MC_BEGIN(0, 0);
10381 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10382 IEM_MC_REL_JMP_S8(i8Imm);
10383 } IEM_MC_ELSE() {
10384 IEM_MC_ADVANCE_RIP();
10385 } IEM_MC_ENDIF();
10386 IEM_MC_END();
10387 return VINF_SUCCESS;
10388}
10389
10390
10391/** Opcode 0x75. */
10392FNIEMOP_DEF(iemOp_jne_Jb)
10393{
10394 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
10395 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10397 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10398
10399 IEM_MC_BEGIN(0, 0);
10400 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10401 IEM_MC_ADVANCE_RIP();
10402 } IEM_MC_ELSE() {
10403 IEM_MC_REL_JMP_S8(i8Imm);
10404 } IEM_MC_ENDIF();
10405 IEM_MC_END();
10406 return VINF_SUCCESS;
10407}
10408
10409
10410/** Opcode 0x76. */
10411FNIEMOP_DEF(iemOp_jbe_Jb)
10412{
10413 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
10414 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10416 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10417
10418 IEM_MC_BEGIN(0, 0);
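    /* Unsigned 'below or equal': taken when CF or ZF is set. */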
10419 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10420 IEM_MC_REL_JMP_S8(i8Imm);
10421 } IEM_MC_ELSE() {
10422 IEM_MC_ADVANCE_RIP();
10423 } IEM_MC_ENDIF();
10424 IEM_MC_END();
10425 return VINF_SUCCESS;
10426}
10427
10428
10429/** Opcode 0x77. */
10430FNIEMOP_DEF(iemOp_jnbe_Jb)
10431{
10432 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
10433 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10435 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10436
10437 IEM_MC_BEGIN(0, 0);
10438 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10439 IEM_MC_ADVANCE_RIP();
10440 } IEM_MC_ELSE() {
10441 IEM_MC_REL_JMP_S8(i8Imm);
10442 } IEM_MC_ENDIF();
10443 IEM_MC_END();
10444 return VINF_SUCCESS;
10445}
10446
10447
10448/** Opcode 0x78. */
10449FNIEMOP_DEF(iemOp_js_Jb)
10450{
10451 IEMOP_MNEMONIC(js_Jb, "js Jb");
10452 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10454 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10455
10456 IEM_MC_BEGIN(0, 0);
10457 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
10458 IEM_MC_REL_JMP_S8(i8Imm);
10459 } IEM_MC_ELSE() {
10460 IEM_MC_ADVANCE_RIP();
10461 } IEM_MC_ENDIF();
10462 IEM_MC_END();
10463 return VINF_SUCCESS;
10464}
10465
10466
10467/** Opcode 0x79. */
10468FNIEMOP_DEF(iemOp_jns_Jb)
10469{
10470 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
10471 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10473 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10474
10475 IEM_MC_BEGIN(0, 0);
10476 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
10477 IEM_MC_ADVANCE_RIP();
10478 } IEM_MC_ELSE() {
10479 IEM_MC_REL_JMP_S8(i8Imm);
10480 } IEM_MC_ENDIF();
10481 IEM_MC_END();
10482 return VINF_SUCCESS;
10483}
10484
10485
10486/** Opcode 0x7a. */
10487FNIEMOP_DEF(iemOp_jp_Jb)
10488{
10489 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
10490 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10492 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10493
10494 IEM_MC_BEGIN(0, 0);
10495 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10496 IEM_MC_REL_JMP_S8(i8Imm);
10497 } IEM_MC_ELSE() {
10498 IEM_MC_ADVANCE_RIP();
10499 } IEM_MC_ENDIF();
10500 IEM_MC_END();
10501 return VINF_SUCCESS;
10502}
10503
10504
10505/** Opcode 0x7b. */
10506FNIEMOP_DEF(iemOp_jnp_Jb)
10507{
10508 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
10509 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10511 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10512
10513 IEM_MC_BEGIN(0, 0);
10514 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10515 IEM_MC_ADVANCE_RIP();
10516 } IEM_MC_ELSE() {
10517 IEM_MC_REL_JMP_S8(i8Imm);
10518 } IEM_MC_ENDIF();
10519 IEM_MC_END();
10520 return VINF_SUCCESS;
10521}
10522
10523
10524/** Opcode 0x7c. */
10525FNIEMOP_DEF(iemOp_jl_Jb)
10526{
10527 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
10528 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10530 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10531
10532 IEM_MC_BEGIN(0, 0);
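    /* Signed 'less than': taken when SF != OF. */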
10533 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
10534 IEM_MC_REL_JMP_S8(i8Imm);
10535 } IEM_MC_ELSE() {
10536 IEM_MC_ADVANCE_RIP();
10537 } IEM_MC_ENDIF();
10538 IEM_MC_END();
10539 return VINF_SUCCESS;
10540}
10541
10542
10543/** Opcode 0x7d. */
10544FNIEMOP_DEF(iemOp_jnl_Jb)
10545{
10546 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
10547 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10549 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10550
10551 IEM_MC_BEGIN(0, 0);
10552 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
10553 IEM_MC_ADVANCE_RIP();
10554 } IEM_MC_ELSE() {
10555 IEM_MC_REL_JMP_S8(i8Imm);
10556 } IEM_MC_ENDIF();
10557 IEM_MC_END();
10558 return VINF_SUCCESS;
10559}
10560
10561
10562/** Opcode 0x7e. */
10563FNIEMOP_DEF(iemOp_jle_Jb)
10564{
10565 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
10566 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10568 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10569
10570 IEM_MC_BEGIN(0, 0);
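    /* Signed 'less or equal': taken when ZF is set or SF != OF. */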
10571 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
10572 IEM_MC_REL_JMP_S8(i8Imm);
10573 } IEM_MC_ELSE() {
10574 IEM_MC_ADVANCE_RIP();
10575 } IEM_MC_ENDIF();
10576 IEM_MC_END();
10577 return VINF_SUCCESS;
10578}
10579
10580
10581/** Opcode 0x7f. */
10582FNIEMOP_DEF(iemOp_jnle_Jb)
10583{
10584 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
10585 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10587 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10588
10589 IEM_MC_BEGIN(0, 0);
10590 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
10591 IEM_MC_ADVANCE_RIP();
10592 } IEM_MC_ELSE() {
10593 IEM_MC_REL_JMP_S8(i8Imm);
10594 } IEM_MC_ENDIF();
10595 IEM_MC_END();
10596 return VINF_SUCCESS;
10597}
10598
10599
10600/** Opcode 0x80. */
10601FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
10602{
10603 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
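    /* Group 1: the ModRM reg field selects the operation (0=ADD, 1=OR, 2=ADC,
       3=SBB, 4=AND, 5=SUB, 6=XOR, 7=CMP) instead of naming a register. */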
10604 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10605 {
10606 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
10607 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
10608 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
10609 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
10610 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
10611 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
10612 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
10613 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
10614 }
10615 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10616
10617 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10618 {
10619 /* register target */
10620 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10622 IEM_MC_BEGIN(3, 0);
10623 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10624 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
10625 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10626
10627 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10628 IEM_MC_REF_EFLAGS(pEFlags);
10629 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
10630
10631 IEM_MC_ADVANCE_RIP();
10632 IEM_MC_END();
10633 }
10634 else
10635 {
10636 /* memory target */
10637 uint32_t fAccess;
10638 if (pImpl->pfnLockedU8)
10639 fAccess = IEM_ACCESS_DATA_RW;
10640 else /* CMP */
10641 fAccess = IEM_ACCESS_DATA_R;
10642 IEM_MC_BEGIN(3, 2);
10643 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10644 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10646
10647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10648 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10649 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
10650 if (pImpl->pfnLockedU8)
10651 IEMOP_HLP_DONE_DECODING();
10652 else
10653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10654
10655 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10656 IEM_MC_FETCH_EFLAGS(EFlags);
10657 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10658 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
10659 else
10660 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
10661
10662 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
10663 IEM_MC_COMMIT_EFLAGS(EFlags);
10664 IEM_MC_ADVANCE_RIP();
10665 IEM_MC_END();
10666 }
10667 return VINF_SUCCESS;
10668}
10669
10670
10671/** Opcode 0x81. */
10672FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
10673{
10674 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10675 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10676 {
10677 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
10678 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
10679 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
10680 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
10681 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
10682 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
10683 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
10684 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
10685 }
10686 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10687
10688 switch (pVCpu->iem.s.enmEffOpSize)
10689 {
10690 case IEMMODE_16BIT:
10691 {
10692 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10693 {
10694 /* register target */
10695 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10697 IEM_MC_BEGIN(3, 0);
10698 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10699 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
10700 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10701
10702 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10703 IEM_MC_REF_EFLAGS(pEFlags);
10704 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10705
10706 IEM_MC_ADVANCE_RIP();
10707 IEM_MC_END();
10708 }
10709 else
10710 {
10711 /* memory target */
10712 uint32_t fAccess;
10713 if (pImpl->pfnLockedU16)
10714 fAccess = IEM_ACCESS_DATA_RW;
10715 else /* CMP, TEST */
10716 fAccess = IEM_ACCESS_DATA_R;
10717 IEM_MC_BEGIN(3, 2);
10718 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10719 IEM_MC_ARG(uint16_t, u16Src, 1);
10720 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10722
10723 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
10724 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10725 IEM_MC_ASSIGN(u16Src, u16Imm);
10726 if (pImpl->pfnLockedU16)
10727 IEMOP_HLP_DONE_DECODING();
10728 else
10729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10730 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10731 IEM_MC_FETCH_EFLAGS(EFlags);
10732 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10733 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10734 else
10735 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10736
10737 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10738 IEM_MC_COMMIT_EFLAGS(EFlags);
10739 IEM_MC_ADVANCE_RIP();
10740 IEM_MC_END();
10741 }
10742 break;
10743 }
10744
10745 case IEMMODE_32BIT:
10746 {
10747 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10748 {
10749 /* register target */
10750 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10752 IEM_MC_BEGIN(3, 0);
10753 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10754 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
10755 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10756
10757 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10758 IEM_MC_REF_EFLAGS(pEFlags);
10759 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10760 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10761
10762 IEM_MC_ADVANCE_RIP();
10763 IEM_MC_END();
10764 }
10765 else
10766 {
10767 /* memory target */
10768 uint32_t fAccess;
10769 if (pImpl->pfnLockedU32)
10770 fAccess = IEM_ACCESS_DATA_RW;
10771 else /* CMP, TEST */
10772 fAccess = IEM_ACCESS_DATA_R;
10773 IEM_MC_BEGIN(3, 2);
10774 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10775 IEM_MC_ARG(uint32_t, u32Src, 1);
10776 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10777 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10778
10779 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10780 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10781 IEM_MC_ASSIGN(u32Src, u32Imm);
10782 if (pImpl->pfnLockedU32)
10783 IEMOP_HLP_DONE_DECODING();
10784 else
10785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10786 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10787 IEM_MC_FETCH_EFLAGS(EFlags);
10788 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10789 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10790 else
10791 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10792
10793 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10794 IEM_MC_COMMIT_EFLAGS(EFlags);
10795 IEM_MC_ADVANCE_RIP();
10796 IEM_MC_END();
10797 }
10798 break;
10799 }
10800
10801 case IEMMODE_64BIT:
10802 {
10803 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10804 {
10805 /* register target */
10806 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10808 IEM_MC_BEGIN(3, 0);
10809 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10810 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
10811 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10812
10813 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10814 IEM_MC_REF_EFLAGS(pEFlags);
10815 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10816
10817 IEM_MC_ADVANCE_RIP();
10818 IEM_MC_END();
10819 }
10820 else
10821 {
10822 /* memory target */
10823 uint32_t fAccess;
10824 if (pImpl->pfnLockedU64)
10825 fAccess = IEM_ACCESS_DATA_RW;
10826 else /* CMP */
10827 fAccess = IEM_ACCESS_DATA_R;
10828 IEM_MC_BEGIN(3, 2);
10829 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10830 IEM_MC_ARG(uint64_t, u64Src, 1);
10831 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10832 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10833
10834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10835 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10836 if (pImpl->pfnLockedU64)
10837 IEMOP_HLP_DONE_DECODING();
10838 else
10839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10840 IEM_MC_ASSIGN(u64Src, u64Imm);
10841 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10842 IEM_MC_FETCH_EFLAGS(EFlags);
10843 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10844 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10845 else
10846 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10847
10848 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10849 IEM_MC_COMMIT_EFLAGS(EFlags);
10850 IEM_MC_ADVANCE_RIP();
10851 IEM_MC_END();
10852 }
10853 break;
10854 }
10855 }
10856 return VINF_SUCCESS;
10857}
10858
10859
10860/** Opcode 0x82. */
10861FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
10862{
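    /* 0x82 is an undocumented alias of 0x80 (Grp1 Eb,Ib); in 64-bit mode it
       is invalid, which is what the check below enforces. */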
10863 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
10864 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
10865}
10866
10867
10868/** Opcode 0x83. */
10869FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
10870{
10871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10872 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10873 {
10874 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
10875 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
10876 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
10877 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
10878 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
10879 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
10880 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
10881 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
10882 }
10883 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
10884 to the 386 even though they are absent from the Intel reference manuals and some
10885 3rd party opcode listings. */
10886 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10887
10888 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10889 {
10890 /*
10891 * Register target
10892 */
10893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10894 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10895 switch (pVCpu->iem.s.enmEffOpSize)
10896 {
10897 case IEMMODE_16BIT:
10898 {
10899 IEM_MC_BEGIN(3, 0);
10900 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10901 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
10902 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10903
10904 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10905 IEM_MC_REF_EFLAGS(pEFlags);
10906 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10907
10908 IEM_MC_ADVANCE_RIP();
10909 IEM_MC_END();
10910 break;
10911 }
10912
10913 case IEMMODE_32BIT:
10914 {
10915 IEM_MC_BEGIN(3, 0);
10916 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10917 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
10918 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10919
10920 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10921 IEM_MC_REF_EFLAGS(pEFlags);
10922 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10923 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10924
10925 IEM_MC_ADVANCE_RIP();
10926 IEM_MC_END();
10927 break;
10928 }
10929
10930 case IEMMODE_64BIT:
10931 {
10932 IEM_MC_BEGIN(3, 0);
10933 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10934 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
10935 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10936
10937 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10938 IEM_MC_REF_EFLAGS(pEFlags);
10939 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10940
10941 IEM_MC_ADVANCE_RIP();
10942 IEM_MC_END();
10943 break;
10944 }
10945 }
10946 }
10947 else
10948 {
10949 /*
10950 * Memory target.
10951 */
10952 uint32_t fAccess;
10953 if (pImpl->pfnLockedU16)
10954 fAccess = IEM_ACCESS_DATA_RW;
10955 else /* CMP */
10956 fAccess = IEM_ACCESS_DATA_R;
10957
10958 switch (pVCpu->iem.s.enmEffOpSize)
10959 {
10960 case IEMMODE_16BIT:
10961 {
10962 IEM_MC_BEGIN(3, 2);
10963 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10964 IEM_MC_ARG(uint16_t, u16Src, 1);
10965 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10967
10968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10969 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10970 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
10971 if (pImpl->pfnLockedU16)
10972 IEMOP_HLP_DONE_DECODING();
10973 else
10974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10975 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10976 IEM_MC_FETCH_EFLAGS(EFlags);
10977 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10978 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10979 else
10980 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10981
10982 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10983 IEM_MC_COMMIT_EFLAGS(EFlags);
10984 IEM_MC_ADVANCE_RIP();
10985 IEM_MC_END();
10986 break;
10987 }
10988
10989 case IEMMODE_32BIT:
10990 {
10991 IEM_MC_BEGIN(3, 2);
10992 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10993 IEM_MC_ARG(uint32_t, u32Src, 1);
10994 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10996
10997 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10998 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10999 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
11000 if (pImpl->pfnLockedU32)
11001 IEMOP_HLP_DONE_DECODING();
11002 else
11003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11004 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11005 IEM_MC_FETCH_EFLAGS(EFlags);
11006 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11007 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11008 else
11009 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
11010
11011 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
11012 IEM_MC_COMMIT_EFLAGS(EFlags);
11013 IEM_MC_ADVANCE_RIP();
11014 IEM_MC_END();
11015 break;
11016 }
11017
11018 case IEMMODE_64BIT:
11019 {
11020 IEM_MC_BEGIN(3, 2);
11021 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11022 IEM_MC_ARG(uint64_t, u64Src, 1);
11023 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11024 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11025
11026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
11027 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11028 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
11029 if (pImpl->pfnLockedU64)
11030 IEMOP_HLP_DONE_DECODING();
11031 else
11032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11033 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11034 IEM_MC_FETCH_EFLAGS(EFlags);
11035 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11036 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11037 else
11038 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
11039
11040 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
11041 IEM_MC_COMMIT_EFLAGS(EFlags);
11042 IEM_MC_ADVANCE_RIP();
11043 IEM_MC_END();
11044 break;
11045 }
11046 }
11047 }
11048 return VINF_SUCCESS;
11049}
11050
11051
11052/** Opcode 0x84. */
11053FNIEMOP_DEF(iemOp_test_Eb_Gb)
11054{
11055 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
11056 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11057 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
11058}
11059
11060
11061/** Opcode 0x85. */
11062FNIEMOP_DEF(iemOp_test_Ev_Gv)
11063{
11064 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
11065 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11066 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
11067}
11068
11069
11070/** Opcode 0x86. */
11071FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
11072{
11073 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11074 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
11075
11076 /*
11077 * If rm is denoting a register, no more instruction bytes.
11078 */
11079 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11080 {
11081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11082
11083 IEM_MC_BEGIN(0, 2);
11084 IEM_MC_LOCAL(uint8_t, uTmp1);
11085 IEM_MC_LOCAL(uint8_t, uTmp2);
11086
11087 IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11088 IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11089 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11090 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11091
11092 IEM_MC_ADVANCE_RIP();
11093 IEM_MC_END();
11094 }
11095 else
11096 {
11097 /*
11098 * We're accessing memory.
11099 */
11100/** @todo the register must be committed separately! */
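    /* Note: XCHG with a memory operand asserts implicit LOCK semantics on
       real hardware, regardless of whether a LOCK prefix is present. */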
11101 IEM_MC_BEGIN(2, 2);
11102 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
11103 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11105
11106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11107 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11108 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11109 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
11110 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
11111
11112 IEM_MC_ADVANCE_RIP();
11113 IEM_MC_END();
11114 }
11115 return VINF_SUCCESS;
11116}
11117
11118
11119/** Opcode 0x87. */
11120FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
11121{
11122 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
11123 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11124
11125 /*
11126 * If rm is denoting a register, no more instruction bytes.
11127 */
11128 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11129 {
11130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11131
11132 switch (pVCpu->iem.s.enmEffOpSize)
11133 {
11134 case IEMMODE_16BIT:
11135 IEM_MC_BEGIN(0, 2);
11136 IEM_MC_LOCAL(uint16_t, uTmp1);
11137 IEM_MC_LOCAL(uint16_t, uTmp2);
11138
11139 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11140 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11141 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11142 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11143
11144 IEM_MC_ADVANCE_RIP();
11145 IEM_MC_END();
11146 return VINF_SUCCESS;
11147
11148 case IEMMODE_32BIT:
11149 IEM_MC_BEGIN(0, 2);
11150 IEM_MC_LOCAL(uint32_t, uTmp1);
11151 IEM_MC_LOCAL(uint32_t, uTmp2);
11152
11153 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11154 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11155 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11156 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11157
11158 IEM_MC_ADVANCE_RIP();
11159 IEM_MC_END();
11160 return VINF_SUCCESS;
11161
11162 case IEMMODE_64BIT:
11163 IEM_MC_BEGIN(0, 2);
11164 IEM_MC_LOCAL(uint64_t, uTmp1);
11165 IEM_MC_LOCAL(uint64_t, uTmp2);
11166
11167 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11168 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11169 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11170 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11171
11172 IEM_MC_ADVANCE_RIP();
11173 IEM_MC_END();
11174 return VINF_SUCCESS;
11175
11176 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11177 }
11178 }
11179 else
11180 {
11181 /*
11182 * We're accessing memory.
11183 */
11184 switch (pVCpu->iem.s.enmEffOpSize)
11185 {
11186/** @todo the register must be committed separately! */
11187 case IEMMODE_16BIT:
11188 IEM_MC_BEGIN(2, 2);
11189 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
11190 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11192
11193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11194 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11195 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11196 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
11197 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
11198
11199 IEM_MC_ADVANCE_RIP();
11200 IEM_MC_END();
11201 return VINF_SUCCESS;
11202
11203 case IEMMODE_32BIT:
11204 IEM_MC_BEGIN(2, 2);
11205 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
11206 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11208
11209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11210 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11211 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11212 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
11213 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
11214
11215 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
11216 IEM_MC_ADVANCE_RIP();
11217 IEM_MC_END();
11218 return VINF_SUCCESS;
11219
11220 case IEMMODE_64BIT:
11221 IEM_MC_BEGIN(2, 2);
11222 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
11223 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11225
11226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11227 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11228 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11229 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
11230 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
11231
11232 IEM_MC_ADVANCE_RIP();
11233 IEM_MC_END();
11234 return VINF_SUCCESS;
11235
11236 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11237 }
11238 }
11239}
11240
11241
11242/** Opcode 0x88. */
11243FNIEMOP_DEF(iemOp_mov_Eb_Gb)
11244{
11245 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
11246
11247 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11249
11250 /*
11251 * If rm is denoting a register, no more instruction bytes.
11252 */
11253 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11254 {
11255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11256 IEM_MC_BEGIN(0, 1);
11257 IEM_MC_LOCAL(uint8_t, u8Value);
11258 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11259 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
11260 IEM_MC_ADVANCE_RIP();
11261 IEM_MC_END();
11262 }
11263 else
11264 {
11265 /*
11266 * We're writing a register to memory.
11267 */
11268 IEM_MC_BEGIN(0, 2);
11269 IEM_MC_LOCAL(uint8_t, u8Value);
11270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11271 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11273 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11274 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
11275 IEM_MC_ADVANCE_RIP();
11276 IEM_MC_END();
11277 }
11278 return VINF_SUCCESS;
11280}
11281
11282
11283/** Opcode 0x89. */
11284FNIEMOP_DEF(iemOp_mov_Ev_Gv)
11285{
11286 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
11287
11288 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11289
11290 /*
11291 * If rm is denoting a register, no more instruction bytes.
11292 */
11293 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11294 {
11295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11296 switch (pVCpu->iem.s.enmEffOpSize)
11297 {
11298 case IEMMODE_16BIT:
11299 IEM_MC_BEGIN(0, 1);
11300 IEM_MC_LOCAL(uint16_t, u16Value);
11301 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11302 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
11303 IEM_MC_ADVANCE_RIP();
11304 IEM_MC_END();
11305 break;
11306
11307 case IEMMODE_32BIT:
11308 IEM_MC_BEGIN(0, 1);
11309 IEM_MC_LOCAL(uint32_t, u32Value);
11310 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11311 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
11312 IEM_MC_ADVANCE_RIP();
11313 IEM_MC_END();
11314 break;
11315
11316 case IEMMODE_64BIT:
11317 IEM_MC_BEGIN(0, 1);
11318 IEM_MC_LOCAL(uint64_t, u64Value);
11319 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11320 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
11321 IEM_MC_ADVANCE_RIP();
11322 IEM_MC_END();
11323 break;
11324 }
11325 }
11326 else
11327 {
11328 /*
11329 * We're writing a register to memory.
11330 */
11331 switch (pVCpu->iem.s.enmEffOpSize)
11332 {
11333 case IEMMODE_16BIT:
11334 IEM_MC_BEGIN(0, 2);
11335 IEM_MC_LOCAL(uint16_t, u16Value);
11336 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11339 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11340 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
11341 IEM_MC_ADVANCE_RIP();
11342 IEM_MC_END();
11343 break;
11344
11345 case IEMMODE_32BIT:
11346 IEM_MC_BEGIN(0, 2);
11347 IEM_MC_LOCAL(uint32_t, u32Value);
11348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11351 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11352 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11353 IEM_MC_ADVANCE_RIP();
11354 IEM_MC_END();
11355 break;
11356
11357 case IEMMODE_64BIT:
11358 IEM_MC_BEGIN(0, 2);
11359 IEM_MC_LOCAL(uint64_t, u64Value);
11360 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11363 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11364 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11365 IEM_MC_ADVANCE_RIP();
11366 IEM_MC_END();
11367 break;
11368 }
11369 }
11370 return VINF_SUCCESS;
11371}
11372
11373
11374/** Opcode 0x8a. */
11375FNIEMOP_DEF(iemOp_mov_Gb_Eb)
11376{
11377 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
11378
11379 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11380
11381 /*
11382 * If rm is denoting a register, no more instruction bytes.
11383 */
11384 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11385 {
11386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11387 IEM_MC_BEGIN(0, 1);
11388 IEM_MC_LOCAL(uint8_t, u8Value);
11389 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11390 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
11391 IEM_MC_ADVANCE_RIP();
11392 IEM_MC_END();
11393 }
11394 else
11395 {
11396 /*
11397 * We're loading a register from memory.
11398 */
11399 IEM_MC_BEGIN(0, 2);
11400 IEM_MC_LOCAL(uint8_t, u8Value);
11401 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11404 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11405 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
11406 IEM_MC_ADVANCE_RIP();
11407 IEM_MC_END();
11408 }
11409 return VINF_SUCCESS;
11410}
11411
11412
11413/** Opcode 0x8b. */
11414FNIEMOP_DEF(iemOp_mov_Gv_Ev)
11415{
11416 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
11417
11418 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11419
11420 /*
11421 * If rm is denoting a register, no more instruction bytes.
11422 */
11423 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11424 {
11425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11426 switch (pVCpu->iem.s.enmEffOpSize)
11427 {
11428 case IEMMODE_16BIT:
11429 IEM_MC_BEGIN(0, 1);
11430 IEM_MC_LOCAL(uint16_t, u16Value);
11431 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11432 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
11433 IEM_MC_ADVANCE_RIP();
11434 IEM_MC_END();
11435 break;
11436
11437 case IEMMODE_32BIT:
11438 IEM_MC_BEGIN(0, 1);
11439 IEM_MC_LOCAL(uint32_t, u32Value);
11440 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11441 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
11442 IEM_MC_ADVANCE_RIP();
11443 IEM_MC_END();
11444 break;
11445
11446 case IEMMODE_64BIT:
11447 IEM_MC_BEGIN(0, 1);
11448 IEM_MC_LOCAL(uint64_t, u64Value);
11449 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11450 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
11451 IEM_MC_ADVANCE_RIP();
11452 IEM_MC_END();
11453 break;
11454 }
11455 }
11456 else
11457 {
11458 /*
11459 * We're loading a register from memory.
11460 */
11461 switch (pVCpu->iem.s.enmEffOpSize)
11462 {
11463 case IEMMODE_16BIT:
11464 IEM_MC_BEGIN(0, 2);
11465 IEM_MC_LOCAL(uint16_t, u16Value);
11466 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11469 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11470 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
11471 IEM_MC_ADVANCE_RIP();
11472 IEM_MC_END();
11473 break;
11474
11475 case IEMMODE_32BIT:
11476 IEM_MC_BEGIN(0, 2);
11477 IEM_MC_LOCAL(uint32_t, u32Value);
11478 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11481 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11482 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
11483 IEM_MC_ADVANCE_RIP();
11484 IEM_MC_END();
11485 break;
11486
11487 case IEMMODE_64BIT:
11488 IEM_MC_BEGIN(0, 2);
11489 IEM_MC_LOCAL(uint64_t, u64Value);
11490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11493 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11494 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
11495 IEM_MC_ADVANCE_RIP();
11496 IEM_MC_END();
11497 break;
11498 }
11499 }
11500 return VINF_SUCCESS;
11501}
11502
11503
11504/** Opcode 0x63. */
11505FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
11506{
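    /* Outside 64-bit mode opcode 0x63 decodes as ARPL Ew,Gw. In 64-bit mode
       it is MOVSXD; without a 64-bit operand size the sign extension is moot
       and it effectively behaves like a plain MOV Gv,Ev, which is what gets
       dispatched below. */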
11507 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
11508 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
11509 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11510 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
11511 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
11512}
11513
11514
11515/** Opcode 0x8c. */
11516FNIEMOP_DEF(iemOp_mov_Ev_Sw)
11517{
11518 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
11519
11520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11521
11522 /*
11523 * Check that the source segment register exists. The REX.R prefix is ignored.
11524 */
11525 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
11526 if ( iSegReg > X86_SREG_GS)
11527 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
11528
11529 /*
11530 * If rm is denoting a register, no more instruction bytes.
11531 * In that case, the operand size is respected and the upper bits are
11532 * cleared (starting with some Pentium models).
11533 */
11534 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11535 {
11536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11537 switch (pVCpu->iem.s.enmEffOpSize)
11538 {
11539 case IEMMODE_16BIT:
11540 IEM_MC_BEGIN(0, 1);
11541 IEM_MC_LOCAL(uint16_t, u16Value);
11542 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
11543 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
11544 IEM_MC_ADVANCE_RIP();
11545 IEM_MC_END();
11546 break;
11547
11548 case IEMMODE_32BIT:
11549 IEM_MC_BEGIN(0, 1);
11550 IEM_MC_LOCAL(uint32_t, u32Value);
11551 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
11552 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
11553 IEM_MC_ADVANCE_RIP();
11554 IEM_MC_END();
11555 break;
11556
11557 case IEMMODE_64BIT:
11558 IEM_MC_BEGIN(0, 1);
11559 IEM_MC_LOCAL(uint64_t, u64Value);
11560 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
11561 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
11562 IEM_MC_ADVANCE_RIP();
11563 IEM_MC_END();
11564 break;
11565 }
11566 }
11567 else
11568 {
11569 /*
11570 * We're saving the register to memory. The access is word sized
11571 * regardless of operand size prefixes.
11572 */
11573#if 0 /* not necessary */
11574 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
11575#endif
11576 IEM_MC_BEGIN(0, 2);
11577 IEM_MC_LOCAL(uint16_t, u16Value);
11578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11581 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
11582 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
11583 IEM_MC_ADVANCE_RIP();
11584 IEM_MC_END();
11585 }
11586 return VINF_SUCCESS;
11587}
11588
11589
11590
11591
11592/** Opcode 0x8d. */
11593FNIEMOP_DEF(iemOp_lea_Gv_M)
11594{
11595 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
11596 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11597 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11598 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
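    /* LEA only stores the computed effective address, truncated or zero
       extended to the effective operand size; no memory access is performed. */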
11599
11600 switch (pVCpu->iem.s.enmEffOpSize)
11601 {
11602 case IEMMODE_16BIT:
11603 IEM_MC_BEGIN(0, 2);
11604 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11605 IEM_MC_LOCAL(uint16_t, u16Cast);
11606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11608 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
11609 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
11610 IEM_MC_ADVANCE_RIP();
11611 IEM_MC_END();
11612 return VINF_SUCCESS;
11613
11614 case IEMMODE_32BIT:
11615 IEM_MC_BEGIN(0, 2);
11616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11617 IEM_MC_LOCAL(uint32_t, u32Cast);
11618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11620 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
11621 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
11622 IEM_MC_ADVANCE_RIP();
11623 IEM_MC_END();
11624 return VINF_SUCCESS;
11625
11626 case IEMMODE_64BIT:
11627 IEM_MC_BEGIN(0, 1);
11628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11631 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
11632 IEM_MC_ADVANCE_RIP();
11633 IEM_MC_END();
11634 return VINF_SUCCESS;
11635 }
11636 AssertFailedReturn(VERR_IEM_IPE_7);
11637}
11638
11639
11640/** Opcode 0x8e. */
11641FNIEMOP_DEF(iemOp_mov_Sw_Ev)
11642{
11643 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
11644
11645 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11646
11647 /*
11648 * The practical operand size is 16-bit.
11649 */
11650#if 0 /* not necessary */
11651 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
11652#endif
11653
11654 /*
11655 * Check that the destination register exists and can be used with this
11656 * instruction. The REX.R prefix is ignored.
11657 */
11658 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
11659 if ( iSegReg == X86_SREG_CS
11660 || iSegReg > X86_SREG_GS)
11661 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
11662
11663 /*
11664 * If rm is denoting a register, no more instruction bytes.
11665 */
11666 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11667 {
11668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11669 IEM_MC_BEGIN(2, 0);
11670 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
11671 IEM_MC_ARG(uint16_t, u16Value, 1);
11672 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11673 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
11674 IEM_MC_END();
11675 }
11676 else
11677 {
11678 /*
11679 * We're loading the register from memory. The access is word sized
11680 * regardless of operand size prefixes.
11681 */
11682 IEM_MC_BEGIN(2, 1);
11683 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
11684 IEM_MC_ARG(uint16_t, u16Value, 1);
11685 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11688 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11689 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
11690 IEM_MC_END();
11691 }
11692 return VINF_SUCCESS;
11693}
11694
11695
11696/** Opcode 0x8f /0. */
11697FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
11698{
11699 /* This bugger is rather annoying as it requires rSP to be updated before
11700 doing the effective address calculations. Will eventually require a
11701 split between the R/M+SIB decoding and the effective address
11702 calculation - which is something that is required for any attempt at
11703 reusing this code for a recompiler. It may also be good to have if we
11704 need to delay the #UD exception caused by invalid lock prefixes.
11705
11706 For now, we'll do a mostly safe interpreter-only implementation here. */
11707 /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
11708 * now until tests show it's checked. */
11709 IEMOP_MNEMONIC(pop_Ev, "pop Ev");
11710
11711 /* Register access is relatively easy and can share code. */
11712 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11713 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11714
11715 /*
11716 * Memory target.
11717 *
11718 * Intel says that RSP is incremented before it's used in any effective
11719 * address calculations. This means some serious extra annoyance here since
11720 * we decode and calculate the effective address in one step and like to
11721 * delay committing registers till everything is done.
11722 *
11723 * So, we'll decode and calculate the effective address twice. This will
11724 * require some recoding if turned into a recompiler.
11725 */
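    /* Example: in 64-bit mode 'pop qword [rsp+8]' reads the value from the
       old RSP, but the store address is computed from the already incremented
       RSP, so the value ends up at old RSP + 16. */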
11726 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
11727
11728#ifndef TST_IEM_CHECK_MC
11729 /* Calc effective address with modified ESP. */
11730/** @todo testcase */
11731 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
11732 RTGCPTR GCPtrEff;
11733 VBOXSTRICTRC rcStrict;
11734 switch (pVCpu->iem.s.enmEffOpSize)
11735 {
11736 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
11737 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
11738 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
11739 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11740 }
11741 if (rcStrict != VINF_SUCCESS)
11742 return rcStrict;
11743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11744
11745 /* Perform the operation - this should be CImpl. */
11746 RTUINT64U TmpRsp;
11747 TmpRsp.u = pCtx->rsp;
11748 switch (pVCpu->iem.s.enmEffOpSize)
11749 {
11750 case IEMMODE_16BIT:
11751 {
11752 uint16_t u16Value;
11753 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
11754 if (rcStrict == VINF_SUCCESS)
11755 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
11756 break;
11757 }
11758
11759 case IEMMODE_32BIT:
11760 {
11761 uint32_t u32Value;
11762 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
11763 if (rcStrict == VINF_SUCCESS)
11764 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
11765 break;
11766 }
11767
11768 case IEMMODE_64BIT:
11769 {
11770 uint64_t u64Value;
11771 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
11772 if (rcStrict == VINF_SUCCESS)
11773 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
11774 break;
11775 }
11776
11777 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11778 }
11779 if (rcStrict == VINF_SUCCESS)
11780 {
11781 pCtx->rsp = TmpRsp.u;
11782 iemRegUpdateRipAndClearRF(pVCpu);
11783 }
11784 return rcStrict;
11785
11786#else
11787 return VERR_IEM_IPE_2;
11788#endif
11789}
11790
11791
11792/** Opcode 0x8f. */
11793FNIEMOP_DEF(iemOp_Grp1A)
11794{
11795 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11796 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
11797 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
11798
11799 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
11800 /** @todo XOP decoding. */
11801 IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
11802 return IEMOP_RAISE_INVALID_OPCODE();
11803}
11804
11805
11806/**
11807 * Common 'xchg reg,rAX' helper.
11808 */
11809FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
11810{
11811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11812
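    /* Fold in REX.B so that e.g. opcode 0x91 with REX.B exchanges R9 with RAX. */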
11813 iReg |= pVCpu->iem.s.uRexB;
11814 switch (pVCpu->iem.s.enmEffOpSize)
11815 {
11816 case IEMMODE_16BIT:
11817 IEM_MC_BEGIN(0, 2);
11818 IEM_MC_LOCAL(uint16_t, u16Tmp1);
11819 IEM_MC_LOCAL(uint16_t, u16Tmp2);
11820 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
11821 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
11822 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
11823 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
11824 IEM_MC_ADVANCE_RIP();
11825 IEM_MC_END();
11826 return VINF_SUCCESS;
11827
11828 case IEMMODE_32BIT:
11829 IEM_MC_BEGIN(0, 2);
11830 IEM_MC_LOCAL(uint32_t, u32Tmp1);
11831 IEM_MC_LOCAL(uint32_t, u32Tmp2);
11832 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
11833 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
11834 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
11835 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
11836 IEM_MC_ADVANCE_RIP();
11837 IEM_MC_END();
11838 return VINF_SUCCESS;
11839
11840 case IEMMODE_64BIT:
11841 IEM_MC_BEGIN(0, 2);
11842 IEM_MC_LOCAL(uint64_t, u64Tmp1);
11843 IEM_MC_LOCAL(uint64_t, u64Tmp2);
11844 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
11845 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
11846 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
11847 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
11848 IEM_MC_ADVANCE_RIP();
11849 IEM_MC_END();
11850 return VINF_SUCCESS;
11851
11852 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11853 }
11854}
11855
11856
11857/** Opcode 0x90. */
11858FNIEMOP_DEF(iemOp_nop)
11859{
11860 /* R8/R8D and RAX/EAX can be exchanged. */
11861 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
11862 {
11863 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
11864 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
11865 }
11866
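    /* With the F3 (REPZ) prefix 0x90 is PAUSE; it has no architecturally
       visible effect here, so IEM handles it like a plain NOP. */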
11867 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
11868 IEMOP_MNEMONIC(pause, "pause");
11869 else
11870 IEMOP_MNEMONIC(nop, "nop");
11871 IEM_MC_BEGIN(0, 0);
11872 IEM_MC_ADVANCE_RIP();
11873 IEM_MC_END();
11874 return VINF_SUCCESS;
11875}
11876
11877
11878/** Opcode 0x91. */
11879FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
11880{
11881 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
11882 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
11883}
11884
11885
11886/** Opcode 0x92. */
11887FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
11888{
11889 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
11890 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
11891}
11892
11893
11894/** Opcode 0x93. */
11895FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
11896{
11897 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
11898 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
11899}
11900
11901
11902/** Opcode 0x94. */
11903FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
11904{
11905    IEMOP_MNEMONIC(xchg_rSP_rAX, "xchg rSP,rAX");
11906 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
11907}
11908
11909
11910/** Opcode 0x95. */
11911FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
11912{
11913 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
11914 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
11915}
11916
11917
11918/** Opcode 0x96. */
11919FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
11920{
11921 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
11922 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
11923}
11924
11925
11926/** Opcode 0x97. */
11927FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
11928{
11929 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
11930 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
11931}
11932
11933
11934/** Opcode 0x98. */
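/* Sign extension within rAX, e.g.: cbw: AL=0x80 -> AX=0xff80;
 cwde: AX=0x1234 -> EAX=0x00001234;
 cdqe: EAX=0x80000000 -> RAX=0xffffffff80000000. */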
11935FNIEMOP_DEF(iemOp_cbw)
11936{
11937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11938 switch (pVCpu->iem.s.enmEffOpSize)
11939 {
11940 case IEMMODE_16BIT:
11941 IEMOP_MNEMONIC(cbw, "cbw");
11942 IEM_MC_BEGIN(0, 1);
11943 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
11944 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
11945 } IEM_MC_ELSE() {
11946 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
11947 } IEM_MC_ENDIF();
11948 IEM_MC_ADVANCE_RIP();
11949 IEM_MC_END();
11950 return VINF_SUCCESS;
11951
11952 case IEMMODE_32BIT:
11953 IEMOP_MNEMONIC(cwde, "cwde");
11954 IEM_MC_BEGIN(0, 1);
11955 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11956 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
11957 } IEM_MC_ELSE() {
11958 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
11959 } IEM_MC_ENDIF();
11960 IEM_MC_ADVANCE_RIP();
11961 IEM_MC_END();
11962 return VINF_SUCCESS;
11963
11964 case IEMMODE_64BIT:
11965 IEMOP_MNEMONIC(cdqe, "cdqe");
11966 IEM_MC_BEGIN(0, 1);
11967 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
11968 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
11969 } IEM_MC_ELSE() {
11970 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
11971 } IEM_MC_ENDIF();
11972 IEM_MC_ADVANCE_RIP();
11973 IEM_MC_END();
11974 return VINF_SUCCESS;
11975
11976 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11977 }
11978}
11979
11980
11981/** Opcode 0x99. */
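/* Replicates the sign bit of rAX into all of rDX, e.g.:
 cwd: AX=0x8000 -> DX=0xffff;
 cdq: EAX=0x12345678 -> EDX=0x00000000;
 cqo: RAX=-1 -> RDX=0xffffffffffffffff. */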
11982FNIEMOP_DEF(iemOp_cwd)
11983{
11984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11985 switch (pVCpu->iem.s.enmEffOpSize)
11986 {
11987 case IEMMODE_16BIT:
11988 IEMOP_MNEMONIC(cwd, "cwd");
11989 IEM_MC_BEGIN(0, 1);
11990 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11991 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
11992 } IEM_MC_ELSE() {
11993 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
11994 } IEM_MC_ENDIF();
11995 IEM_MC_ADVANCE_RIP();
11996 IEM_MC_END();
11997 return VINF_SUCCESS;
11998
11999 case IEMMODE_32BIT:
12000 IEMOP_MNEMONIC(cdq, "cdq");
12001 IEM_MC_BEGIN(0, 1);
12002 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
12003 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
12004 } IEM_MC_ELSE() {
12005 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
12006 } IEM_MC_ENDIF();
12007 IEM_MC_ADVANCE_RIP();
12008 IEM_MC_END();
12009 return VINF_SUCCESS;
12010
12011 case IEMMODE_64BIT:
12012 IEMOP_MNEMONIC(cqo, "cqo");
12013 IEM_MC_BEGIN(0, 1);
12014 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
12015 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
12016 } IEM_MC_ELSE() {
12017 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
12018 } IEM_MC_ENDIF();
12019 IEM_MC_ADVANCE_RIP();
12020 IEM_MC_END();
12021 return VINF_SUCCESS;
12022
12023 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12024 }
12025}
12026
12027
12028/** Opcode 0x9a. */
12029FNIEMOP_DEF(iemOp_call_Ap)
12030{
12031 IEMOP_MNEMONIC(call_Ap, "call Ap");
12032 IEMOP_HLP_NO_64BIT();
12033
12034 /* Decode the far pointer address and pass it on to the far call C implementation. */
12035 uint32_t offSeg;
12036 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
12037 IEM_OPCODE_GET_NEXT_U32(&offSeg);
12038 else
12039 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
12040 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
12041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12042 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
12043}
12044
12045
12046/** Opcode 0x9b. (aka fwait) */
12047FNIEMOP_DEF(iemOp_wait)
12048{
12049 IEMOP_MNEMONIC(wait, "wait");
12050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12051
12052 IEM_MC_BEGIN(0, 0);
12053 IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
12054 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12055 IEM_MC_ADVANCE_RIP();
12056 IEM_MC_END();
12057 return VINF_SUCCESS;
12058}
12059
12060
12061/** Opcode 0x9c. */
12062FNIEMOP_DEF(iemOp_pushf_Fv)
12063{
12064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12065 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12066 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
12067}
12068
12069
12070/** Opcode 0x9d. */
12071FNIEMOP_DEF(iemOp_popf_Fv)
12072{
12073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12074 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12075 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
12076}
12077
12078
12079/** Opcode 0x9e. */
12080FNIEMOP_DEF(iemOp_sahf)
12081{
12082 IEMOP_MNEMONIC(sahf, "sahf");
12083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12084 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
12085 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
12086 return IEMOP_RAISE_INVALID_OPCODE();
12087 IEM_MC_BEGIN(0, 2);
12088 IEM_MC_LOCAL(uint32_t, u32Flags);
12089 IEM_MC_LOCAL(uint32_t, EFlags);
12090 IEM_MC_FETCH_EFLAGS(EFlags);
12091 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
12092 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
12093 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
12094 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
12095 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
12096 IEM_MC_COMMIT_EFLAGS(EFlags);
12097 IEM_MC_ADVANCE_RIP();
12098 IEM_MC_END();
12099 return VINF_SUCCESS;
12100}
12101
12102
12103/** Opcode 0x9f. */
12104FNIEMOP_DEF(iemOp_lahf)
12105{
12106 IEMOP_MNEMONIC(lahf, "lahf");
12107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12108 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
12109 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
12110 return IEMOP_RAISE_INVALID_OPCODE();
12111 IEM_MC_BEGIN(0, 1);
12112 IEM_MC_LOCAL(uint8_t, u8Flags);
12113 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
12114 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
12115 IEM_MC_ADVANCE_RIP();
12116 IEM_MC_END();
12117 return VINF_SUCCESS;
12118}
12119
12120
12121/**
12122 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
12123 * iemOp_mov_Ov_rAX to fetch the moffsXX part of the opcode and fend off lock
12124 * prefixes. Will return on failures.
12125 * @param a_GCPtrMemOff The variable to store the offset in.
12126 */
12127#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
12128 do \
12129 { \
12130 switch (pVCpu->iem.s.enmEffAddrMode) \
12131 { \
12132 case IEMMODE_16BIT: \
12133 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
12134 break; \
12135 case IEMMODE_32BIT: \
12136 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
12137 break; \
12138 case IEMMODE_64BIT: \
12139 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
12140 break; \
12141 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
12142 } \
12143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12144 } while (0)
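
/* The moffs width follows the address size, not the operand size; e.g. in
 64-bit code a plain 0xa1 carries an 8-byte offset, while a 0x67 prefix
 shrinks it to 4 bytes. */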
12145
12146/** Opcode 0xa0. */
12147FNIEMOP_DEF(iemOp_mov_Al_Ob)
12148{
12149 /*
12150 * Get the offset and fend off lock prefixes.
12151 */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
12152 RTGCPTR GCPtrMemOff;
12153 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12154
12155 /*
12156 * Fetch AL.
12157 */
12158 IEM_MC_BEGIN(0,1);
12159 IEM_MC_LOCAL(uint8_t, u8Tmp);
12160 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12161 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
12162 IEM_MC_ADVANCE_RIP();
12163 IEM_MC_END();
12164 return VINF_SUCCESS;
12165}
12166
12167
12168/** Opcode 0xa1. */
12169FNIEMOP_DEF(iemOp_mov_rAX_Ov)
12170{
12171 /*
12172 * Get the offset and fend off lock prefixes.
12173 */
12174 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
12175 RTGCPTR GCPtrMemOff;
12176 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12177
12178 /*
12179 * Fetch rAX.
12180 */
12181 switch (pVCpu->iem.s.enmEffOpSize)
12182 {
12183 case IEMMODE_16BIT:
12184 IEM_MC_BEGIN(0,1);
12185 IEM_MC_LOCAL(uint16_t, u16Tmp);
12186 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12187 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
12188 IEM_MC_ADVANCE_RIP();
12189 IEM_MC_END();
12190 return VINF_SUCCESS;
12191
12192 case IEMMODE_32BIT:
12193 IEM_MC_BEGIN(0,1);
12194 IEM_MC_LOCAL(uint32_t, u32Tmp);
12195 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12196 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
12197 IEM_MC_ADVANCE_RIP();
12198 IEM_MC_END();
12199 return VINF_SUCCESS;
12200
12201 case IEMMODE_64BIT:
12202 IEM_MC_BEGIN(0,1);
12203 IEM_MC_LOCAL(uint64_t, u64Tmp);
12204 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12205 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
12206 IEM_MC_ADVANCE_RIP();
12207 IEM_MC_END();
12208 return VINF_SUCCESS;
12209
12210 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12211 }
12212}
12213
12214
12215/** Opcode 0xa2. */
12216FNIEMOP_DEF(iemOp_mov_Ob_AL)
12217{
12218 /*
12219 * Get the offset and fend off lock prefixes.
12220 */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
12221 RTGCPTR GCPtrMemOff;
12222 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12223
12224 /*
12225 * Store AL.
12226 */
12227 IEM_MC_BEGIN(0,1);
12228 IEM_MC_LOCAL(uint8_t, u8Tmp);
12229 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
12230 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
12231 IEM_MC_ADVANCE_RIP();
12232 IEM_MC_END();
12233 return VINF_SUCCESS;
12234}
12235
12236
12237/** Opcode 0xa3. */
12238FNIEMOP_DEF(iemOp_mov_Ov_rAX)
12239{
12240 /*
12241 * Get the offset and fend off lock prefixes.
12242 */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
12243 RTGCPTR GCPtrMemOff;
12244 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12245
12246 /*
12247 * Store rAX.
12248 */
12249 switch (pVCpu->iem.s.enmEffOpSize)
12250 {
12251 case IEMMODE_16BIT:
12252 IEM_MC_BEGIN(0,1);
12253 IEM_MC_LOCAL(uint16_t, u16Tmp);
12254 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
12255 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
12256 IEM_MC_ADVANCE_RIP();
12257 IEM_MC_END();
12258 return VINF_SUCCESS;
12259
12260 case IEMMODE_32BIT:
12261 IEM_MC_BEGIN(0,1);
12262 IEM_MC_LOCAL(uint32_t, u32Tmp);
12263 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
12264 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
12265 IEM_MC_ADVANCE_RIP();
12266 IEM_MC_END();
12267 return VINF_SUCCESS;
12268
12269 case IEMMODE_64BIT:
12270 IEM_MC_BEGIN(0,1);
12271 IEM_MC_LOCAL(uint64_t, u64Tmp);
12272 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
12273 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
12274 IEM_MC_ADVANCE_RIP();
12275 IEM_MC_END();
12276 return VINF_SUCCESS;
12277
12278 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12279 }
12280}
12281
12282/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
12283#define IEM_MOVS_CASE(ValBits, AddrBits) \
12284 IEM_MC_BEGIN(0, 2); \
12285 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12286 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12287 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12288 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
12289 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12290 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
12291 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12292 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12293 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12294 } IEM_MC_ELSE() { \
12295 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12296 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12297 } IEM_MC_ENDIF(); \
12298 IEM_MC_ADVANCE_RIP(); \
12299 IEM_MC_END();
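
/* Both pointers step by the element size in the direction given by EFLAGS.DF,
 e.g. a 32-bit movsd with DF=0 adds 4 to rSI and rDI, while with DF=1 it
 subtracts 4 from each. */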
12300
12301/** Opcode 0xa4. */
12302FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
12303{
12304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12305
12306 /*
12307 * Use the C implementation if a repeat prefix is encountered.
12308 */
12309 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12310 {
12311 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
12312 switch (pVCpu->iem.s.enmEffAddrMode)
12313 {
12314 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
12315 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
12316 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
12317 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12318 }
12319 }
12320 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
12321
12322 /*
12323 * Sharing case implementation with movs[wdq] below.
12324 */
12325 switch (pVCpu->iem.s.enmEffAddrMode)
12326 {
12327 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
12328 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
12329 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
12330 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12331 }
12332 return VINF_SUCCESS;
12333}
12334
12335
12336/** Opcode 0xa5. */
12337FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
12338{
12339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12340
12341 /*
12342 * Use the C implementation if a repeat prefix is encountered.
12343 */
12344 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12345 {
12346 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
12347 switch (pVCpu->iem.s.enmEffOpSize)
12348 {
12349 case IEMMODE_16BIT:
12350 switch (pVCpu->iem.s.enmEffAddrMode)
12351 {
12352 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
12353 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
12354 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
12355 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12356 }
12357 break;
12358 case IEMMODE_32BIT:
12359 switch (pVCpu->iem.s.enmEffAddrMode)
12360 {
12361 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
12362 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
12363 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
12364 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12365 }
12366 case IEMMODE_64BIT:
12367 switch (pVCpu->iem.s.enmEffAddrMode)
12368 {
12369 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
12370 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
12371 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
12372 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12373 }
12374 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12375 }
12376 }
12377 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
12378
12379 /*
12380 * Annoying double switch here.
12381 * Using ugly macro for implementing the cases, sharing it with movsb.
12382 */
12383 switch (pVCpu->iem.s.enmEffOpSize)
12384 {
12385 case IEMMODE_16BIT:
12386 switch (pVCpu->iem.s.enmEffAddrMode)
12387 {
12388 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
12389 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
12390 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
12391 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12392 }
12393 break;
12394
12395 case IEMMODE_32BIT:
12396 switch (pVCpu->iem.s.enmEffAddrMode)
12397 {
12398 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
12399 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
12400 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
12401 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12402 }
12403 break;
12404
12405 case IEMMODE_64BIT:
12406 switch (pVCpu->iem.s.enmEffAddrMode)
12407 {
12408 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12409 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
12410 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
12411 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12412 }
12413 break;
12414 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12415 }
12416 return VINF_SUCCESS;
12417}
12418
12419#undef IEM_MOVS_CASE
12420
12421/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
12422#define IEM_CMPS_CASE(ValBits, AddrBits) \
12423 IEM_MC_BEGIN(3, 3); \
12424 IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
12425 IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
12426 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
12427 IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
12428 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12429 \
12430 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12431 IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
12432 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12433 IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
12434 IEM_MC_REF_LOCAL(puValue1, uValue1); \
12435 IEM_MC_REF_EFLAGS(pEFlags); \
12436 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
12437 \
12438 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12439 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12440 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12441 } IEM_MC_ELSE() { \
12442 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12443 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12444 } IEM_MC_ENDIF(); \
12445 IEM_MC_ADVANCE_RIP(); \
12446 IEM_MC_END(); \
12447
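/* cmps only computes *DS:rSI - *ES:rDI for the flags; neither operand is
 written. E.g. 'repe cmpsb' keeps going while rCX != 0 and ZF=1, stopping
 at the first mismatching byte. */
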
12448/** Opcode 0xa6. */
12449FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
12450{
12451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12452
12453 /*
12454 * Use the C implementation if a repeat prefix is encountered.
12455 */
12456 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12457 {
12458 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
12459 switch (pVCpu->iem.s.enmEffAddrMode)
12460 {
12461 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
12462 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
12463 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
12464 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12465 }
12466 }
12467 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12468 {
12469 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
12470 switch (pVCpu->iem.s.enmEffAddrMode)
12471 {
12472 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
12473 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
12474 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
12475 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12476 }
12477 }
12478 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
12479
12480 /*
12481 * Sharing case implementation with cmps[wdq] below.
12482 */
12483 switch (pVCpu->iem.s.enmEffAddrMode)
12484 {
12485 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
12486 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
12487 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
12488 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12489 }
12490 return VINF_SUCCESS;
12491
12492}
12493
12494
12495/** Opcode 0xa7. */
12496FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
12497{
12498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12499
12500 /*
12501 * Use the C implementation if a repeat prefix is encountered.
12502 */
12503 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12504 {
12505 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
12506 switch (pVCpu->iem.s.enmEffOpSize)
12507 {
12508 case IEMMODE_16BIT:
12509 switch (pVCpu->iem.s.enmEffAddrMode)
12510 {
12511 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12512 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12513 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12514 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12515 }
12516 break;
12517 case IEMMODE_32BIT:
12518 switch (pVCpu->iem.s.enmEffAddrMode)
12519 {
12520 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12521 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12522 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12523 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12524 }
12525 case IEMMODE_64BIT:
12526 switch (pVCpu->iem.s.enmEffAddrMode)
12527 {
12528 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
12529 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12530 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12531 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12532 }
12533 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12534 }
12535 }
12536
12537 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12538 {
12539 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
12540 switch (pVCpu->iem.s.enmEffOpSize)
12541 {
12542 case IEMMODE_16BIT:
12543 switch (pVCpu->iem.s.enmEffAddrMode)
12544 {
12545 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12546 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12547 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12548 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12549 }
12550 break;
12551 case IEMMODE_32BIT:
12552 switch (pVCpu->iem.s.enmEffAddrMode)
12553 {
12554 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12555 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12556 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12557 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12558 }
12559 case IEMMODE_64BIT:
12560 switch (pVCpu->iem.s.enmEffAddrMode)
12561 {
12562 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
12563 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12564 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12565 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12566 }
12567 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12568 }
12569 }
12570
12571 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
12572
12573 /*
12574 * Annoying double switch here.
12575 * Using ugly macro for implementing the cases, sharing it with cmpsb.
12576 */
12577 switch (pVCpu->iem.s.enmEffOpSize)
12578 {
12579 case IEMMODE_16BIT:
12580 switch (pVCpu->iem.s.enmEffAddrMode)
12581 {
12582 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
12583 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
12584 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
12585 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12586 }
12587 break;
12588
12589 case IEMMODE_32BIT:
12590 switch (pVCpu->iem.s.enmEffAddrMode)
12591 {
12592 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
12593 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
12594 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
12595 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12596 }
12597 break;
12598
12599 case IEMMODE_64BIT:
12600 switch (pVCpu->iem.s.enmEffAddrMode)
12601 {
12602 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12603 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
12604 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
12605 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12606 }
12607 break;
12608 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12609 }
12610 return VINF_SUCCESS;
12611
12612}
12613
12614#undef IEM_CMPS_CASE
12615
12616/** Opcode 0xa8. */
12617FNIEMOP_DEF(iemOp_test_AL_Ib)
12618{
12619 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
12620 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12621 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
12622}
12623
12624
12625/** Opcode 0xa9. */
12626FNIEMOP_DEF(iemOp_test_eAX_Iz)
12627{
12628 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
12629 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12630 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
12631}
12632
12633
12634/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
12635#define IEM_STOS_CASE(ValBits, AddrBits) \
12636 IEM_MC_BEGIN(0, 2); \
12637 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12638 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12639 IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
12640 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12641 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
12642 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12643 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12644 } IEM_MC_ELSE() { \
12645 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12646 } IEM_MC_ENDIF(); \
12647 IEM_MC_ADVANCE_RIP(); \
12648 IEM_MC_END(); \
12649
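/* stos writes the low ValBits of rAX to ES:rDI and steps rDI only; 'rep
 stosb' with AL=0 is the classic memset-to-zero idiom. */
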
12650/** Opcode 0xaa. */
12651FNIEMOP_DEF(iemOp_stosb_Yb_AL)
12652{
12653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12654
12655 /*
12656 * Use the C implementation if a repeat prefix is encountered.
12657 */
12658 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12659 {
12660 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
12661 switch (pVCpu->iem.s.enmEffAddrMode)
12662 {
12663 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
12664 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
12665 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
12666 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12667 }
12668 }
12669 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
12670
12671 /*
12672 * Sharing case implementation with stos[wdq] below.
12673 */
12674 switch (pVCpu->iem.s.enmEffAddrMode)
12675 {
12676 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
12677 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
12678 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
12679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12680 }
12681 return VINF_SUCCESS;
12682}
12683
12684
12685/** Opcode 0xab. */
12686FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
12687{
12688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12689
12690 /*
12691 * Use the C implementation if a repeat prefix is encountered.
12692 */
12693 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12694 {
12695 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
12696 switch (pVCpu->iem.s.enmEffOpSize)
12697 {
12698 case IEMMODE_16BIT:
12699 switch (pVCpu->iem.s.enmEffAddrMode)
12700 {
12701 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
12702 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
12703 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
12704 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12705 }
12706 break;
12707 case IEMMODE_32BIT:
12708 switch (pVCpu->iem.s.enmEffAddrMode)
12709 {
12710 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
12711 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
12712 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
12713 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12714 }
12715 case IEMMODE_64BIT:
12716 switch (pVCpu->iem.s.enmEffAddrMode)
12717 {
12718 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
12719 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
12720 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
12721 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12722 }
12723 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12724 }
12725 }
12726 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
12727
12728 /*
12729 * Annoying double switch here.
12730 * Using ugly macro for implementing the cases, sharing it with stosb.
12731 */
12732 switch (pVCpu->iem.s.enmEffOpSize)
12733 {
12734 case IEMMODE_16BIT:
12735 switch (pVCpu->iem.s.enmEffAddrMode)
12736 {
12737 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
12738 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
12739 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
12740 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12741 }
12742 break;
12743
12744 case IEMMODE_32BIT:
12745 switch (pVCpu->iem.s.enmEffAddrMode)
12746 {
12747 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
12748 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
12749 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
12750 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12751 }
12752 break;
12753
12754 case IEMMODE_64BIT:
12755 switch (pVCpu->iem.s.enmEffAddrMode)
12756 {
12757 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12758 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
12759 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
12760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12761 }
12762 break;
12763 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12764 }
12765 return VINF_SUCCESS;
12766}
12767
12768#undef IEM_STOS_CASE
12769
12770/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
12771#define IEM_LODS_CASE(ValBits, AddrBits) \
12772 IEM_MC_BEGIN(0, 2); \
12773 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12774 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12775 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12776 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
12777 IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
12778 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12779 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12780 } IEM_MC_ELSE() { \
12781 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12782 } IEM_MC_ENDIF(); \
12783 IEM_MC_ADVANCE_RIP(); \
12784 IEM_MC_END();
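
/* lods is the load counterpart: it reads from DS:rSI (segment overridable)
 into rAX and steps rSI only; 'rep lods' is legal but rarely useful since
 each iteration overwrites rAX. */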
12785
12786/** Opcode 0xac. */
12787FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
12788{
12789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12790
12791 /*
12792 * Use the C implementation if a repeat prefix is encountered.
12793 */
12794 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12795 {
12796 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
12797 switch (pVCpu->iem.s.enmEffAddrMode)
12798 {
12799 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
12800 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
12801 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
12802 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12803 }
12804 }
12805 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
12806
12807 /*
12808     * Sharing case implementation with lods[wdq] below.
12809 */
12810 switch (pVCpu->iem.s.enmEffAddrMode)
12811 {
12812 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
12813 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
12814 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
12815 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12816 }
12817 return VINF_SUCCESS;
12818}
12819
12820
12821/** Opcode 0xad. */
12822FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
12823{
12824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12825
12826 /*
12827 * Use the C implementation if a repeat prefix is encountered.
12828 */
12829 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12830 {
12831 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
12832 switch (pVCpu->iem.s.enmEffOpSize)
12833 {
12834 case IEMMODE_16BIT:
12835 switch (pVCpu->iem.s.enmEffAddrMode)
12836 {
12837 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
12838 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
12839 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
12840 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12841 }
12842 break;
12843 case IEMMODE_32BIT:
12844 switch (pVCpu->iem.s.enmEffAddrMode)
12845 {
12846 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
12847 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
12848 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
12849 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12850 }
12851 case IEMMODE_64BIT:
12852 switch (pVCpu->iem.s.enmEffAddrMode)
12853 {
12854 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
12855 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
12856 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
12857 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12858 }
12859 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12860 }
12861 }
12862 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
12863
12864 /*
12865 * Annoying double switch here.
12866 * Using ugly macro for implementing the cases, sharing it with lodsb.
12867 */
12868 switch (pVCpu->iem.s.enmEffOpSize)
12869 {
12870 case IEMMODE_16BIT:
12871 switch (pVCpu->iem.s.enmEffAddrMode)
12872 {
12873 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
12874 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
12875 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
12876 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12877 }
12878 break;
12879
12880 case IEMMODE_32BIT:
12881 switch (pVCpu->iem.s.enmEffAddrMode)
12882 {
12883 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
12884 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
12885 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
12886 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12887 }
12888 break;
12889
12890 case IEMMODE_64BIT:
12891 switch (pVCpu->iem.s.enmEffAddrMode)
12892 {
12893 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12894 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
12895 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
12896 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12897 }
12898 break;
12899 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12900 }
12901 return VINF_SUCCESS;
12902}
12903
12904#undef IEM_LODS_CASE
12905
12906/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
12907#define IEM_SCAS_CASE(ValBits, AddrBits) \
12908 IEM_MC_BEGIN(3, 2); \
12909 IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
12910 IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
12911 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
12912 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12913 \
12914 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12915 IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
12916 IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
12917 IEM_MC_REF_EFLAGS(pEFlags); \
12918 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
12919 \
12920 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12921 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12922 } IEM_MC_ELSE() { \
12923 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12924 } IEM_MC_ENDIF(); \
12925 IEM_MC_ADVANCE_RIP(); \
12926 IEM_MC_END();
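
/* scas compares rAX against ES:rDI (the ES side cannot be overridden) and
 steps rDI only; 'repne scasb' with AL=0 is the classic strlen scan, leaving
 rDI one past the terminator. */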
12927
12928/** Opcode 0xae. */
12929FNIEMOP_DEF(iemOp_scasb_AL_Xb)
12930{
12931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12932
12933 /*
12934 * Use the C implementation if a repeat prefix is encountered.
12935 */
12936 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12937 {
12938 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
12939 switch (pVCpu->iem.s.enmEffAddrMode)
12940 {
12941 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
12942 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
12943 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
12944 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12945 }
12946 }
12947 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12948 {
12949        IEMOP_MNEMONIC(repne_scasb_AL_Xb, "repne scasb AL,Xb");
12950 switch (pVCpu->iem.s.enmEffAddrMode)
12951 {
12952 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
12953 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
12954 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
12955 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12956 }
12957 }
12958 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
12959
12960 /*
12961     * Sharing case implementation with scas[wdq] below.
12962 */
12963 switch (pVCpu->iem.s.enmEffAddrMode)
12964 {
12965 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
12966 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
12967 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
12968 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12969 }
12970 return VINF_SUCCESS;
12971}
12972
12973
12974/** Opcode 0xaf. */
12975FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
12976{
12977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12978
12979 /*
12980 * Use the C implementation if a repeat prefix is encountered.
12981 */
12982 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12983 {
12984 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
12985 switch (pVCpu->iem.s.enmEffOpSize)
12986 {
12987 case IEMMODE_16BIT:
12988 switch (pVCpu->iem.s.enmEffAddrMode)
12989 {
12990 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
12991 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
12992 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
12993 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12994 }
12995 break;
12996 case IEMMODE_32BIT:
12997 switch (pVCpu->iem.s.enmEffAddrMode)
12998 {
12999 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
13000 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
13001 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
13002 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13003 }
13004 case IEMMODE_64BIT:
13005 switch (pVCpu->iem.s.enmEffAddrMode)
13006 {
13007                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this right? 16-bit addressing cannot be encoded in 64-bit mode, while 32-bit addressing can. */
13008 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
13009 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
13010 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13011 }
13012 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13013 }
13014 }
13015 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
13016 {
13017 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
13018 switch (pVCpu->iem.s.enmEffOpSize)
13019 {
13020 case IEMMODE_16BIT:
13021 switch (pVCpu->iem.s.enmEffAddrMode)
13022 {
13023 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
13024 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
13025 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
13026 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13027 }
13028 break;
13029 case IEMMODE_32BIT:
13030 switch (pVCpu->iem.s.enmEffAddrMode)
13031 {
13032 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
13033 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
13034 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
13035 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13036 }
13037 case IEMMODE_64BIT:
13038 switch (pVCpu->iem.s.enmEffAddrMode)
13039 {
13040 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
13041 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
13042 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
13043 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13044 }
13045 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13046 }
13047 }
13048 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
13049
13050 /*
13051 * Annoying double switch here.
13052 * Using ugly macro for implementing the cases, sharing it with scasb.
13053 */
13054 switch (pVCpu->iem.s.enmEffOpSize)
13055 {
13056 case IEMMODE_16BIT:
13057 switch (pVCpu->iem.s.enmEffAddrMode)
13058 {
13059 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
13060 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
13061 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
13062 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13063 }
13064 break;
13065
13066 case IEMMODE_32BIT:
13067 switch (pVCpu->iem.s.enmEffAddrMode)
13068 {
13069 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
13070 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
13071 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
13072 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13073 }
13074 break;
13075
13076 case IEMMODE_64BIT:
13077 switch (pVCpu->iem.s.enmEffAddrMode)
13078 {
13079 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
13080 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
13081 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
13082 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13083 }
13084 break;
13085 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13086 }
13087 return VINF_SUCCESS;
13088}
13089
13090#undef IEM_SCAS_CASE
13091
13092/**
13093 * Common 'mov r8, imm8' helper.
13094 */
13095FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
13096{
13097 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13099
13100 IEM_MC_BEGIN(0, 1);
13101 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
13102 IEM_MC_STORE_GREG_U8(iReg, u8Value);
13103 IEM_MC_ADVANCE_RIP();
13104 IEM_MC_END();
13105
13106 return VINF_SUCCESS;
13107}
13108
13109
13110/** Opcode 0xb0. */
13111FNIEMOP_DEF(iemOp_mov_AL_Ib)
13112{
13113 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
13114 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
13115}
13116
13117
13118/** Opcode 0xb1. */
13119FNIEMOP_DEF(iemOp_CL_Ib)
13120{
13121 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
13122 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13123}
13124
13125
13126/** Opcode 0xb2. */
13127FNIEMOP_DEF(iemOp_DL_Ib)
13128{
13129 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
13130 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13131}
13132
13133
13134/** Opcode 0xb3. */
13135FNIEMOP_DEF(iemOp_BL_Ib)
13136{
13137 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
13138 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13139}
13140
13141
13142/** Opcode 0xb4. */
13143FNIEMOP_DEF(iemOp_mov_AH_Ib)
13144{
13145 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
13146 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13147}
13148
13149
13150/** Opcode 0xb5. */
13151FNIEMOP_DEF(iemOp_CH_Ib)
13152{
13153 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
13154 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13155}
13156
13157
13158/** Opcode 0xb6. */
13159FNIEMOP_DEF(iemOp_DH_Ib)
13160{
13161 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
13162 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13163}
13164
13165
13166/** Opcode 0xb7. */
13167FNIEMOP_DEF(iemOp_BH_Ib)
13168{
13169 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
13170 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13171}
13172
13173
13174/**
13175 * Common 'mov regX,immX' helper.
13176 */
13177FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
13178{
13179 switch (pVCpu->iem.s.enmEffOpSize)
13180 {
13181 case IEMMODE_16BIT:
13182 {
13183 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13185
13186 IEM_MC_BEGIN(0, 1);
13187 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
13188 IEM_MC_STORE_GREG_U16(iReg, u16Value);
13189 IEM_MC_ADVANCE_RIP();
13190 IEM_MC_END();
13191 break;
13192 }
13193
13194 case IEMMODE_32BIT:
13195 {
13196 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13198
13199 IEM_MC_BEGIN(0, 1);
13200 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
13201 IEM_MC_STORE_GREG_U32(iReg, u32Value);
13202 IEM_MC_ADVANCE_RIP();
13203 IEM_MC_END();
13204 break;
13205 }
13206 case IEMMODE_64BIT:
13207 {
13208 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
13209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13210
13211 IEM_MC_BEGIN(0, 1);
13212 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
13213 IEM_MC_STORE_GREG_U64(iReg, u64Value);
13214 IEM_MC_ADVANCE_RIP();
13215 IEM_MC_END();
13216 break;
13217 }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
13218    }
13219
13220 return VINF_SUCCESS;
13221}
13222
13223
13224/** Opcode 0xb8. */
13225FNIEMOP_DEF(iemOp_eAX_Iv)
13226{
13227 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
13228 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
13229}
13230
13231
13232/** Opcode 0xb9. */
13233FNIEMOP_DEF(iemOp_eCX_Iv)
13234{
13235 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
13236 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13237}
13238
13239
13240/** Opcode 0xba. */
13241FNIEMOP_DEF(iemOp_eDX_Iv)
13242{
13243 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
13244 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13245}
13246
13247
13248/** Opcode 0xbb. */
13249FNIEMOP_DEF(iemOp_eBX_Iv)
13250{
13251 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
13252 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13253}
13254
13255
13256/** Opcode 0xbc. */
13257FNIEMOP_DEF(iemOp_eSP_Iv)
13258{
13259 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
13260 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13261}
13262
13263
13264/** Opcode 0xbd. */
13265FNIEMOP_DEF(iemOp_eBP_Iv)
13266{
13267 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
13268 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13269}
13270
13271
13272/** Opcode 0xbe. */
13273FNIEMOP_DEF(iemOp_eSI_Iv)
13274{
13275 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
13276 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13277}
13278
13279
13280/** Opcode 0xbf. */
13281FNIEMOP_DEF(iemOp_eDI_Iv)
13282{
13283 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
13284 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13285}
13286
13287
13288/** Opcode 0xc0. */
13289FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
13290{
13291 IEMOP_HLP_MIN_186();
13292 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13293 PCIEMOPSHIFTSIZES pImpl;
13294 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13295 {
13296 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
13297 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
13298 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
13299 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
13300 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
13301 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
13302 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
13303 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13304        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc may be stupid */
13305 }
13306 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13307
13308 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13309 {
13310 /* register */
13311 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13313 IEM_MC_BEGIN(3, 0);
13314 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13315 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13316 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13317 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13318 IEM_MC_REF_EFLAGS(pEFlags);
13319 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13320 IEM_MC_ADVANCE_RIP();
13321 IEM_MC_END();
13322 }
13323 else
13324 {
13325 /* memory */
13326 IEM_MC_BEGIN(3, 2);
13327 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13328 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13329 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13331
13332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13333 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13334 IEM_MC_ASSIGN(cShiftArg, cShift);
13335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13336 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13337 IEM_MC_FETCH_EFLAGS(EFlags);
13338 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13339
13340 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13341 IEM_MC_COMMIT_EFLAGS(EFlags);
13342 IEM_MC_ADVANCE_RIP();
13343 IEM_MC_END();
13344 }
13345 return VINF_SUCCESS;
13346}
13347
13348
13349/** Opcode 0xc1. */
13350FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
13351{
13352 IEMOP_HLP_MIN_186();
13353 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13354 PCIEMOPSHIFTSIZES pImpl;
13355 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13356 {
13357 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
13358 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
13359 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
13360 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
13361 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
13362 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
13363 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
13364 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13365        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc may be stupid */
13366 }
13367 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13368
13369 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13370 {
13371 /* register */
13372 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13374 switch (pVCpu->iem.s.enmEffOpSize)
13375 {
13376 case IEMMODE_16BIT:
13377 IEM_MC_BEGIN(3, 0);
13378 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13379 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13380 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13381 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13382 IEM_MC_REF_EFLAGS(pEFlags);
13383 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13384 IEM_MC_ADVANCE_RIP();
13385 IEM_MC_END();
13386 return VINF_SUCCESS;
13387
13388 case IEMMODE_32BIT:
13389 IEM_MC_BEGIN(3, 0);
13390 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13391 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13392 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13393 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13394 IEM_MC_REF_EFLAGS(pEFlags);
13395 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13396 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13397 IEM_MC_ADVANCE_RIP();
13398 IEM_MC_END();
13399 return VINF_SUCCESS;
13400
13401 case IEMMODE_64BIT:
13402 IEM_MC_BEGIN(3, 0);
13403 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13404 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13405 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13406 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13407 IEM_MC_REF_EFLAGS(pEFlags);
13408 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13409 IEM_MC_ADVANCE_RIP();
13410 IEM_MC_END();
13411 return VINF_SUCCESS;
13412
13413 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13414 }
13415 }
13416 else
13417 {
13418 /* memory */
13419 switch (pVCpu->iem.s.enmEffOpSize)
13420 {
13421 case IEMMODE_16BIT:
13422 IEM_MC_BEGIN(3, 2);
13423 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13424 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13425 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13427
13428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13429 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13430 IEM_MC_ASSIGN(cShiftArg, cShift);
13431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13432 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13433 IEM_MC_FETCH_EFLAGS(EFlags);
13434 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13435
13436 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13437 IEM_MC_COMMIT_EFLAGS(EFlags);
13438 IEM_MC_ADVANCE_RIP();
13439 IEM_MC_END();
13440 return VINF_SUCCESS;
13441
13442 case IEMMODE_32BIT:
13443 IEM_MC_BEGIN(3, 2);
13444 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13445 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13446 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13448
13449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13450 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13451 IEM_MC_ASSIGN(cShiftArg, cShift);
13452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13453 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13454 IEM_MC_FETCH_EFLAGS(EFlags);
13455 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13456
13457 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13458 IEM_MC_COMMIT_EFLAGS(EFlags);
13459 IEM_MC_ADVANCE_RIP();
13460 IEM_MC_END();
13461 return VINF_SUCCESS;
13462
13463 case IEMMODE_64BIT:
13464 IEM_MC_BEGIN(3, 2);
13465 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13466 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13467 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13469
13470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13471 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13472 IEM_MC_ASSIGN(cShiftArg, cShift);
13473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13474 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13475 IEM_MC_FETCH_EFLAGS(EFlags);
13476 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13477
13478 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13479 IEM_MC_COMMIT_EFLAGS(EFlags);
13480 IEM_MC_ADVANCE_RIP();
13481 IEM_MC_END();
13482 return VINF_SUCCESS;
13483
13484 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13485 }
13486 }
13487}
13488
13489
13490/** Opcode 0xc2. */
13491FNIEMOP_DEF(iemOp_retn_Iw)
13492{
13493 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
13494 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13496 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13497 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
13498}
13499
13500
13501/** Opcode 0xc3. */
13502FNIEMOP_DEF(iemOp_retn)
13503{
13504 IEMOP_MNEMONIC(retn, "retn");
13505 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13507 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
13508}
13509
13510
13511/** Opcode 0xc4. */
13512FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
13513{
13514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13515 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
13516 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13517 {
13518        IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
13519        /* The LES instruction is invalid in 64-bit mode. In legacy and
13520           compatibility mode it is invalid with MOD=3.
13521           The use as a VEX prefix is made possible by assigning the inverted
13522           REX.R and REX.X to the two MOD bits, since the REX bits are ignored
13523           outside of 64-bit mode. */
13525 /** @todo VEX: Just use new tables for it. */
13526 return IEMOP_RAISE_INVALID_OPCODE();
13527 }
13528 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
13529 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
13530}
13531
13532
13533/** Opcode 0xc5. */
13534FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
13535{
13536    /* The LDS instruction is invalid in 64-bit mode. In legacy and
13537       compatibility mode it is invalid with MOD=3.
13538       The use as a VEX prefix is made possible by assigning the inverted
13539       REX.R to the top MOD bit and the top bit of the inverted register
13540       specifier (vvvv) to the bottom MOD bit, thereby effectively limiting
       32-bit code to registers 0..7 in this VEX form. VEX is not available
       in real or v86 mode. */
13541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13542 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
13543 {
13544 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
13545 {
13546 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
13547 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
13548 }
13549 IEMOP_HLP_NO_REAL_OR_V86_MODE();
13550 }
13551
13552 IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
13553 /** @todo Test when exactly the VEX conformance checks kick in during
13554 * instruction decoding and fetching (using \#PF). */
13555 uint8_t bVex1; IEM_OPCODE_GET_NEXT_U8(&bVex1);
13556 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
13557 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
13558#if 0 /* will make sense of this next week... */
13559 if ( !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
13560 &&
13561 )
13562 {
13563
13564 }
13565#endif
13566
13567 /** @todo VEX: Just use new tables for it. */
13568 return IEMOP_RAISE_INVALID_OPCODE();
13569}
13570
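/* Reference sketch of the VEX prefix layout (encoding recap, not code
 * from this file; fields marked ~ are stored inverted):
 *     2-byte VEX (0xc5): [0xc5] [R~ vvvv~ L pp]
 *     3-byte VEX (0xc4): [0xc4] [R~ X~ B~ mmmmm] [W vvvv~ L pp]
 * With registers 0..7 the inverted R/X bits read as ones, so the byte
 * following 0xc4/0xc5 looks like a MOD=3 ModRM byte; that is what makes
 * the MOD checks above a valid way to tell LES/LDS from a VEX prefix
 * outside 64-bit mode. */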
13571
13572/** Opcode 0xc6. */
13573FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
13574{
13575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13576 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
13577 return IEMOP_RAISE_INVALID_OPCODE();
13578 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
13579
13580 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13581 {
13582 /* register access */
13583 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13585 IEM_MC_BEGIN(0, 0);
13586 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
13587 IEM_MC_ADVANCE_RIP();
13588 IEM_MC_END();
13589 }
13590 else
13591 {
13592 /* memory access. */
13593 IEM_MC_BEGIN(0, 1);
13594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13595 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13596 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13598 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
13599 IEM_MC_ADVANCE_RIP();
13600 IEM_MC_END();
13601 }
13602 return VINF_SUCCESS;
13603}
13604
13605
13606/** Opcode 0xc7. */
13607FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
13608{
13609 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13610 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
13611 return IEMOP_RAISE_INVALID_OPCODE();
13612 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
13613
13614 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13615 {
13616 /* register access */
13617 switch (pVCpu->iem.s.enmEffOpSize)
13618 {
13619 case IEMMODE_16BIT:
13620 IEM_MC_BEGIN(0, 0);
13621 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13623 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
13624 IEM_MC_ADVANCE_RIP();
13625 IEM_MC_END();
13626 return VINF_SUCCESS;
13627
13628 case IEMMODE_32BIT:
13629 IEM_MC_BEGIN(0, 0);
13630 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13632 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
13633 IEM_MC_ADVANCE_RIP();
13634 IEM_MC_END();
13635 return VINF_SUCCESS;
13636
13637 case IEMMODE_64BIT:
13638 IEM_MC_BEGIN(0, 0);
13639 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13641 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
13642 IEM_MC_ADVANCE_RIP();
13643 IEM_MC_END();
13644 return VINF_SUCCESS;
13645
13646 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13647 }
13648 }
13649 else
13650 {
13651 /* memory access. */
13652 switch (pVCpu->iem.s.enmEffOpSize)
13653 {
13654 case IEMMODE_16BIT:
13655 IEM_MC_BEGIN(0, 1);
13656 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13657 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
13658 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13660 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
13661 IEM_MC_ADVANCE_RIP();
13662 IEM_MC_END();
13663 return VINF_SUCCESS;
13664
13665 case IEMMODE_32BIT:
13666 IEM_MC_BEGIN(0, 1);
13667 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13668 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13669 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13671 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
13672 IEM_MC_ADVANCE_RIP();
13673 IEM_MC_END();
13674 return VINF_SUCCESS;
13675
13676 case IEMMODE_64BIT:
13677 IEM_MC_BEGIN(0, 1);
13678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13679 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13680 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13682 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
13683 IEM_MC_ADVANCE_RIP();
13684 IEM_MC_END();
13685 return VINF_SUCCESS;
13686
13687 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13688 }
13689 }
13690}
13691
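/* Reference note: unlike the 0xb8+r mov reg,imm encodings, the C7 /0
 * form has no imm64 variant; with REX.W it takes a 32-bit immediate
 * that is sign-extended to 64 bits, which is what the
 * IEM_OPCODE_GET_NEXT_S32_SX_U64 fetches above implement. */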
13692
13693
13694
13695/** Opcode 0xc8. */
13696FNIEMOP_DEF(iemOp_enter_Iw_Ib)
13697{
13698 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
13699 IEMOP_HLP_MIN_186();
13700 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13701 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
13702 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
13703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13704 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
13705}
13706
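/* Reference sketch of what iemCImpl_enter implements for the common
 * nesting level 0 case (the level is taken modulo 32):
 *     push xBP
 *     xBP = xSP
 *     xSP -= cbFrame
 * Non-zero nesting levels additionally build a display of outer frame
 * pointers on the stack before the final allocation. */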
13707
13708/** Opcode 0xc9. */
13709FNIEMOP_DEF(iemOp_leave)
13710{
13711 IEMOP_MNEMONIC(leave, "leave");
13712 IEMOP_HLP_MIN_186();
13713 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13715 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
13716}
13717
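/* Reference note: LEAVE undoes the ENTER frame, i.e. xSP = xBP followed
 * by pop xBP, with the effective operand size selecting BP/EBP/RBP. */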
13718
13719/** Opcode 0xca. */
13720FNIEMOP_DEF(iemOp_retf_Iw)
13721{
13722 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
13723 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13725 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13726 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
13727}
13728
13729
13730/** Opcode 0xcb. */
13731FNIEMOP_DEF(iemOp_retf)
13732{
13733 IEMOP_MNEMONIC(retf, "retf");
13734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13735 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13736 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
13737}
13738
13739
13740/** Opcode 0xcc. */
13741FNIEMOP_DEF(iemOp_int_3)
13742{
13743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13744 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
13745}
13746
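/* Reference note: 0xcc is the dedicated one-byte form of "int 3" used
 * for breakpoints. It is decoded separately from 0xcd 0x03 (below), and
 * the fIsBpInstr flag lets iemCImpl_int apply the special #BP handling
 * (e.g. it is not IOPL-sensitive in V86 mode) that the two-byte form
 * does not get. */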
13747
13748/** Opcode 0xcd. */
13749FNIEMOP_DEF(iemOp_int_Ib)
13750{
13751 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
13752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13753 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
13754}
13755
13756
13757/** Opcode 0xce. */
13758FNIEMOP_DEF(iemOp_into)
13759{
13760 IEMOP_MNEMONIC(into, "into");
13761 IEMOP_HLP_NO_64BIT();
13762
13763 IEM_MC_BEGIN(2, 0);
13764 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
13765 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
13766 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
13767 IEM_MC_END();
13768 return VINF_SUCCESS;
13769}
13770
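/* Reference note: INTO raises #OF (vector 4) when EFLAGS.OF is set and
 * otherwise falls through; the opcode is invalid in 64-bit mode, hence
 * the IEMOP_HLP_NO_64BIT() above. */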
13771
13772/** Opcode 0xcf. */
13773FNIEMOP_DEF(iemOp_iret)
13774{
13775 IEMOP_MNEMONIC(iret, "iret");
13776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13777 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
13778}
13779
13780
13781/** Opcode 0xd0. */
13782FNIEMOP_DEF(iemOp_Grp2_Eb_1)
13783{
13784 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13785 PCIEMOPSHIFTSIZES pImpl;
13786 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13787 {
13788 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
13789 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
13790 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
13791 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
13792 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
13793 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
13794 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
13795 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13796 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
13797 }
13798 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13799
13800 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13801 {
13802 /* register */
13803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13804 IEM_MC_BEGIN(3, 0);
13805 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13806 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13807 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13808 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13809 IEM_MC_REF_EFLAGS(pEFlags);
13810 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13811 IEM_MC_ADVANCE_RIP();
13812 IEM_MC_END();
13813 }
13814 else
13815 {
13816 /* memory */
13817 IEM_MC_BEGIN(3, 2);
13818 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13819 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13820 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13822
13823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13825 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13826 IEM_MC_FETCH_EFLAGS(EFlags);
13827 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13828
13829 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13830 IEM_MC_COMMIT_EFLAGS(EFlags);
13831 IEM_MC_ADVANCE_RIP();
13832 IEM_MC_END();
13833 }
13834 return VINF_SUCCESS;
13835}
13836
13837
13838
13839/** Opcode 0xd1. */
13840FNIEMOP_DEF(iemOp_Grp2_Ev_1)
13841{
13842 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13843 PCIEMOPSHIFTSIZES pImpl;
13844 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13845 {
13846 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
13847 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
13848 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
13849 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
13850 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
13851 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
13852 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
13853 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13854 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
13855 }
13856 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13857
13858 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13859 {
13860 /* register */
13861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13862 switch (pVCpu->iem.s.enmEffOpSize)
13863 {
13864 case IEMMODE_16BIT:
13865 IEM_MC_BEGIN(3, 0);
13866 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13867 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13868 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13869 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13870 IEM_MC_REF_EFLAGS(pEFlags);
13871 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13872 IEM_MC_ADVANCE_RIP();
13873 IEM_MC_END();
13874 return VINF_SUCCESS;
13875
13876 case IEMMODE_32BIT:
13877 IEM_MC_BEGIN(3, 0);
13878 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13879 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13880 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13881 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13882 IEM_MC_REF_EFLAGS(pEFlags);
13883 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13884 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13885 IEM_MC_ADVANCE_RIP();
13886 IEM_MC_END();
13887 return VINF_SUCCESS;
13888
13889 case IEMMODE_64BIT:
13890 IEM_MC_BEGIN(3, 0);
13891 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13892 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13893 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13894 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13895 IEM_MC_REF_EFLAGS(pEFlags);
13896 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13897 IEM_MC_ADVANCE_RIP();
13898 IEM_MC_END();
13899 return VINF_SUCCESS;
13900
13901 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13902 }
13903 }
13904 else
13905 {
13906 /* memory */
13907 switch (pVCpu->iem.s.enmEffOpSize)
13908 {
13909 case IEMMODE_16BIT:
13910 IEM_MC_BEGIN(3, 2);
13911 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13912 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13913 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13914 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13915
13916 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13918 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13919 IEM_MC_FETCH_EFLAGS(EFlags);
13920 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13921
13922 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13923 IEM_MC_COMMIT_EFLAGS(EFlags);
13924 IEM_MC_ADVANCE_RIP();
13925 IEM_MC_END();
13926 return VINF_SUCCESS;
13927
13928 case IEMMODE_32BIT:
13929 IEM_MC_BEGIN(3, 2);
13930 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13931 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13932 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13933 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13934
13935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13937 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13938 IEM_MC_FETCH_EFLAGS(EFlags);
13939 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13940
13941 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13942 IEM_MC_COMMIT_EFLAGS(EFlags);
13943 IEM_MC_ADVANCE_RIP();
13944 IEM_MC_END();
13945 return VINF_SUCCESS;
13946
13947 case IEMMODE_64BIT:
13948 IEM_MC_BEGIN(3, 2);
13949 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13950 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13951 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13953
13954 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13956 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13957 IEM_MC_FETCH_EFLAGS(EFlags);
13958 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13959
13960 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13961 IEM_MC_COMMIT_EFLAGS(EFlags);
13962 IEM_MC_ADVANCE_RIP();
13963 IEM_MC_END();
13964 return VINF_SUCCESS;
13965
13966 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13967 }
13968 }
13969}
13970
13971
13972/** Opcode 0xd2. */
13973FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
13974{
13975 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13976 PCIEMOPSHIFTSIZES pImpl;
13977 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13978 {
13979 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
13980 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
13981 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
13982 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
13983 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
13984 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
13985 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
13986 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13987 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
13988 }
13989 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13990
13991 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13992 {
13993 /* register */
13994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13995 IEM_MC_BEGIN(3, 0);
13996 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13997 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13998 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13999 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14000 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14001 IEM_MC_REF_EFLAGS(pEFlags);
14002 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
14003 IEM_MC_ADVANCE_RIP();
14004 IEM_MC_END();
14005 }
14006 else
14007 {
14008 /* memory */
14009 IEM_MC_BEGIN(3, 2);
14010 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
14011 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14012 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14014
14015 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14017 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14018 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14019 IEM_MC_FETCH_EFLAGS(EFlags);
14020 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
14021
14022 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
14023 IEM_MC_COMMIT_EFLAGS(EFlags);
14024 IEM_MC_ADVANCE_RIP();
14025 IEM_MC_END();
14026 }
14027 return VINF_SUCCESS;
14028}
14029
14030
14031/** Opcode 0xd3. */
14032FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
14033{
14034 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14035 PCIEMOPSHIFTSIZES pImpl;
14036 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14037 {
14038 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
14039 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
14040 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
14041 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
14042 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
14043 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
14044 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
14045 case 6: return IEMOP_RAISE_INVALID_OPCODE();
14046 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
14047 }
14048 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
14049
14050 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14051 {
14052 /* register */
14053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14054 switch (pVCpu->iem.s.enmEffOpSize)
14055 {
14056 case IEMMODE_16BIT:
14057 IEM_MC_BEGIN(3, 0);
14058 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14059 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14060 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14061 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14062 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14063 IEM_MC_REF_EFLAGS(pEFlags);
14064 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
14065 IEM_MC_ADVANCE_RIP();
14066 IEM_MC_END();
14067 return VINF_SUCCESS;
14068
14069 case IEMMODE_32BIT:
14070 IEM_MC_BEGIN(3, 0);
14071 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14072 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14073 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14074 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14075 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14076 IEM_MC_REF_EFLAGS(pEFlags);
14077 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14078 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
14079 IEM_MC_ADVANCE_RIP();
14080 IEM_MC_END();
14081 return VINF_SUCCESS;
14082
14083 case IEMMODE_64BIT:
14084 IEM_MC_BEGIN(3, 0);
14085 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14086 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14087 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14088 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14089 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14090 IEM_MC_REF_EFLAGS(pEFlags);
14091 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14092 IEM_MC_ADVANCE_RIP();
14093 IEM_MC_END();
14094 return VINF_SUCCESS;
14095
14096 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14097 }
14098 }
14099 else
14100 {
14101 /* memory */
14102 switch (pVCpu->iem.s.enmEffOpSize)
14103 {
14104 case IEMMODE_16BIT:
14105 IEM_MC_BEGIN(3, 2);
14106 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14107 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14108 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14110
14111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14113 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14114 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14115 IEM_MC_FETCH_EFLAGS(EFlags);
14116 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
14117
14118 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
14119 IEM_MC_COMMIT_EFLAGS(EFlags);
14120 IEM_MC_ADVANCE_RIP();
14121 IEM_MC_END();
14122 return VINF_SUCCESS;
14123
14124 case IEMMODE_32BIT:
14125 IEM_MC_BEGIN(3, 2);
14126 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14127 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14128 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14130
14131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14133 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14134 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14135 IEM_MC_FETCH_EFLAGS(EFlags);
14136 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14137
14138 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
14139 IEM_MC_COMMIT_EFLAGS(EFlags);
14140 IEM_MC_ADVANCE_RIP();
14141 IEM_MC_END();
14142 return VINF_SUCCESS;
14143
14144 case IEMMODE_64BIT:
14145 IEM_MC_BEGIN(3, 2);
14146 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14147 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14148 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14149 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14150
14151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14153 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14154 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14155 IEM_MC_FETCH_EFLAGS(EFlags);
14156 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14157
14158 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
14159 IEM_MC_COMMIT_EFLAGS(EFlags);
14160 IEM_MC_ADVANCE_RIP();
14161 IEM_MC_END();
14162 return VINF_SUCCESS;
14163
14164 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14165 }
14166 }
14167}
14168
14169/** Opcode 0xd4. */
14170FNIEMOP_DEF(iemOp_aam_Ib)
14171{
14172 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
14173 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14175 IEMOP_HLP_NO_64BIT();
14176 if (!bImm)
14177 return IEMOP_RAISE_DIVIDE_ERROR();
14178 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
14179}
14180
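/* Reference sketch of the AAM semantics deferred to iemCImpl_aam:
 *     AH = AL / bImm;  AL = AL % bImm;
 * with SF/ZF/PF set from the new AL. bImm is normally 10 (d4 0a), and
 * bImm == 0 raises #DE, which the decode-time check above handles. */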
14181
14182/** Opcode 0xd5. */
14183FNIEMOP_DEF(iemOp_aad_Ib)
14184{
14185 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
14186 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14188 IEMOP_HLP_NO_64BIT();
14189 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
14190}
14191
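/* Reference sketch of the AAD semantics deferred to iemCImpl_aad:
 *     AL = (AH * bImm + AL) & 0xff;  AH = 0;
 * with SF/ZF/PF set from the new AL. bImm is normally 10 (d5 0a);
 * unlike AAM, a zero immediate is harmless here. */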
14192
14193/** Opcode 0xd6. */
14194FNIEMOP_DEF(iemOp_salc)
14195{
14196 IEMOP_MNEMONIC(salc, "salc");
14197 IEMOP_HLP_MIN_286(); /* (undocumented at the time) */
14199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* salc takes no operands, so there is nothing more to fetch. */
14200 IEMOP_HLP_NO_64BIT();
14201
14202 IEM_MC_BEGIN(0, 0);
14203 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
14204 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
14205 } IEM_MC_ELSE() {
14206 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
14207 } IEM_MC_ENDIF();
14208 IEM_MC_ADVANCE_RIP();
14209 IEM_MC_END();
14210 return VINF_SUCCESS;
14211}
14212
14213
14214/** Opcode 0xd7. */
14215FNIEMOP_DEF(iemOp_xlat)
14216{
14217 IEMOP_MNEMONIC(xlat, "xlat");
14218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14219 switch (pVCpu->iem.s.enmEffAddrMode)
14220 {
14221 case IEMMODE_16BIT:
14222 IEM_MC_BEGIN(2, 0);
14223 IEM_MC_LOCAL(uint8_t, u8Tmp);
14224 IEM_MC_LOCAL(uint16_t, u16Addr);
14225 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
14226 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
14227 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
14228 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14229 IEM_MC_ADVANCE_RIP();
14230 IEM_MC_END();
14231 return VINF_SUCCESS;
14232
14233 case IEMMODE_32BIT:
14234 IEM_MC_BEGIN(2, 0);
14235 IEM_MC_LOCAL(uint8_t, u8Tmp);
14236 IEM_MC_LOCAL(uint32_t, u32Addr);
14237 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
14238 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
14239 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
14240 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14241 IEM_MC_ADVANCE_RIP();
14242 IEM_MC_END();
14243 return VINF_SUCCESS;
14244
14245 case IEMMODE_64BIT:
14246 IEM_MC_BEGIN(2, 0);
14247 IEM_MC_LOCAL(uint8_t, u8Tmp);
14248 IEM_MC_LOCAL(uint64_t, u64Addr);
14249 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
14250 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
14251 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
14252 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14253 IEM_MC_ADVANCE_RIP();
14254 IEM_MC_END();
14255 return VINF_SUCCESS;
14256
14257 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14258 }
14259}
14260
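/* Reference note: all three cases above implement AL = [seg:xBX + AL]
 * with AL zero-extended to the effective address width; only the width
 * of the xBX base (BX/EBX/RBX) differs, hence the three-way switch on
 * the address mode. */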
14261
14262/**
14263 * Common worker for FPU instructions working on ST0 and STn, and storing the
14264 * result in ST0.
14265 *
14266 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14267 */
14268FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14269{
14270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14271
14272 IEM_MC_BEGIN(3, 1);
14273 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14274 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14275 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14276 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14277
14278 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14279 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14280 IEM_MC_PREPARE_FPU_USAGE();
14281 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14282 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14283 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14284 IEM_MC_ELSE()
14285 IEM_MC_FPU_STACK_UNDERFLOW(0);
14286 IEM_MC_ENDIF();
14287 IEM_MC_ADVANCE_RIP();
14288
14289 IEM_MC_END();
14290 return VINF_SUCCESS;
14291}
14292
14293
14294/**
14295 * Common worker for FPU instructions working on ST0 and STn, and only affecting
14296 * flags.
14297 *
14298 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14299 */
14300FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14301{
14302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14303
14304 IEM_MC_BEGIN(3, 1);
14305 IEM_MC_LOCAL(uint16_t, u16Fsw);
14306 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14307 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14308 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14309
14310 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14311 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14312 IEM_MC_PREPARE_FPU_USAGE();
14313 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14314 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14315 IEM_MC_UPDATE_FSW(u16Fsw);
14316 IEM_MC_ELSE()
14317 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14318 IEM_MC_ENDIF();
14319 IEM_MC_ADVANCE_RIP();
14320
14321 IEM_MC_END();
14322 return VINF_SUCCESS;
14323}
14324
14325
14326/**
14327 * Common worker for FPU instructions working on ST0 and STn, only affecting
14328 * flags, and popping when done.
14329 *
14330 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14331 */
14332FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14333{
14334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14335
14336 IEM_MC_BEGIN(3, 1);
14337 IEM_MC_LOCAL(uint16_t, u16Fsw);
14338 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14339 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14340 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14341
14342 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14343 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14344 IEM_MC_PREPARE_FPU_USAGE();
14345 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14346 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14347 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14348 IEM_MC_ELSE()
14349 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
14350 IEM_MC_ENDIF();
14351 IEM_MC_ADVANCE_RIP();
14352
14353 IEM_MC_END();
14354 return VINF_SUCCESS;
14355}
14356
14357
14358/** Opcode 0xd8 11/0. */
14359FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
14360{
14361 IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
14362 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
14363}
14364
14365
14366/** Opcode 0xd8 11/1. */
14367FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
14368{
14369 IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
14370 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
14371}
14372
14373
14374/** Opcode 0xd8 11/2. */
14375FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
14376{
14377 IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
14378 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
14379}
14380
14381
14382/** Opcode 0xd8 11/3. */
14383FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
14384{
14385 IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
14386 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
14387}
14388
14389
14390/** Opcode 0xd8 11/4. */
14391FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
14392{
14393 IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
14394 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
14395}
14396
14397
14398/** Opcode 0xd8 11/5. */
14399FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
14400{
14401 IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
14402 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
14403}
14404
14405
14406/** Opcode 0xd8 11/6. */
14407FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
14408{
14409 IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
14410 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
14411}
14412
14413
14414/** Opcode 0xd8 11/7. */
14415FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
14416{
14417 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
14418 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
14419}
14420
14421
14422/**
14423 * Common worker for FPU instructions working on ST0 and an m32r, and storing
14424 * the result in ST0.
14425 *
14426 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14427 */
14428FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
14429{
14430 IEM_MC_BEGIN(3, 3);
14431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14432 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14433 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14434 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14435 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14436 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14437
14438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14440
14441 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14442 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14443 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14444
14445 IEM_MC_PREPARE_FPU_USAGE();
14446 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14447 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
14448 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14449 IEM_MC_ELSE()
14450 IEM_MC_FPU_STACK_UNDERFLOW(0);
14451 IEM_MC_ENDIF();
14452 IEM_MC_ADVANCE_RIP();
14453
14454 IEM_MC_END();
14455 return VINF_SUCCESS;
14456}
14457
14458
14459/** Opcode 0xd8 !11/0. */
14460FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
14461{
14462 IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
14463 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
14464}
14465
14466
14467/** Opcode 0xd8 !11/1. */
14468FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
14469{
14470 IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
14471 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
14472}
14473
14474
14475/** Opcode 0xd8 !11/2. */
14476FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
14477{
14478 IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
14479
14480 IEM_MC_BEGIN(3, 3);
14481 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14482 IEM_MC_LOCAL(uint16_t, u16Fsw);
14483 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14484 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14485 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14486 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14487
14488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14490
14491 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14492 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14493 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14494
14495 IEM_MC_PREPARE_FPU_USAGE();
14496 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14497 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
14498 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14499 IEM_MC_ELSE()
14500 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14501 IEM_MC_ENDIF();
14502 IEM_MC_ADVANCE_RIP();
14503
14504 IEM_MC_END();
14505 return VINF_SUCCESS;
14506}
14507
14508
14509/** Opcode 0xd8 !11/3. */
14510FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
14511{
14512 IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
14513
14514 IEM_MC_BEGIN(3, 3);
14515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14516 IEM_MC_LOCAL(uint16_t, u16Fsw);
14517 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14518 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14519 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14520 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14521
14522 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14524
14525 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14526 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14527 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14528
14529 IEM_MC_PREPARE_FPU_USAGE();
14530 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14531 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
14532 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14533 IEM_MC_ELSE()
14534 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14535 IEM_MC_ENDIF();
14536 IEM_MC_ADVANCE_RIP();
14537
14538 IEM_MC_END();
14539 return VINF_SUCCESS;
14540}
14541
14542
14543/** Opcode 0xd8 !11/4. */
14544FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
14545{
14546 IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
14547 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
14548}
14549
14550
14551/** Opcode 0xd8 !11/5. */
14552FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
14553{
14554 IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
14555 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
14556}
14557
14558
14559/** Opcode 0xd8 !11/6. */
14560FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
14561{
14562 IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
14563 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
14564}
14565
14566
14567/** Opcode 0xd8 !11/7. */
14568FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
14569{
14570 IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
14571 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
14572}
14573
14574
14575/** Opcode 0xd8. */
14576FNIEMOP_DEF(iemOp_EscF0)
14577{
14578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14579 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
14580
14581 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14582 {
14583 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14584 {
14585 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
14586 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
14587 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
14588 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
14589 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
14590 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
14591 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
14592 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
14593 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14594 }
14595 }
14596 else
14597 {
14598 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14599 {
14600 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
14601 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
14602 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
14603 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
14604 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
14605 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
14606 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
14607 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
14608 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14609 }
14610 }
14611}
14612
14613
14614/** Opcode 0xd9 /0 mem32real
14615 * @sa iemOp_fld_m64r */
14616FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
14617{
14618 IEMOP_MNEMONIC(fld_m32r, "fld m32r");
14619
14620 IEM_MC_BEGIN(2, 3);
14621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14622 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14623 IEM_MC_LOCAL(RTFLOAT32U, r32Val);
14624 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14625 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
14626
14627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14629
14630 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14631 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14632 IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14633
14634 IEM_MC_PREPARE_FPU_USAGE();
14635 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14636 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
14637 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14638 IEM_MC_ELSE()
14639 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14640 IEM_MC_ENDIF();
14641 IEM_MC_ADVANCE_RIP();
14642
14643 IEM_MC_END();
14644 return VINF_SUCCESS;
14645}
14646
14647
14648/** Opcode 0xd9 !11/2 mem32real */
14649FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
14650{
14651 IEMOP_MNEMONIC(fst_m32r, "fst m32r");
14652 IEM_MC_BEGIN(3, 2);
14653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14654 IEM_MC_LOCAL(uint16_t, u16Fsw);
14655 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14656 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
14657 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14658
14659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14661 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14662 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14663
14664 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14665 IEM_MC_PREPARE_FPU_USAGE();
14666 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14667 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
14668 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14669 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14670 IEM_MC_ELSE()
14671 IEM_MC_IF_FCW_IM()
14672 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
14673 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
14674 IEM_MC_ENDIF();
14675 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14676 IEM_MC_ENDIF();
14677 IEM_MC_ADVANCE_RIP();
14678
14679 IEM_MC_END();
14680 return VINF_SUCCESS;
14681}
14682
14683
14684/** Opcode 0xd9 !11/3 */
14685FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
14686{
14687 IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
14688 IEM_MC_BEGIN(3, 2);
14689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14690 IEM_MC_LOCAL(uint16_t, u16Fsw);
14691 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14692 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
14693 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14694
14695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14697 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14698 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14699
14700 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14701 IEM_MC_PREPARE_FPU_USAGE();
14702 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14703 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
14704 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14705 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14706 IEM_MC_ELSE()
14707 IEM_MC_IF_FCW_IM()
14708 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
14709 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
14710 IEM_MC_ENDIF();
14711 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14712 IEM_MC_ENDIF();
14713 IEM_MC_ADVANCE_RIP();
14714
14715 IEM_MC_END();
14716 return VINF_SUCCESS;
14717}
14718
14719
14720/** Opcode 0xd9 !11/4 */
14721FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
14722{
14723 IEMOP_MNEMONIC(fldenv, "fldenv m14/m28byte");
14724 IEM_MC_BEGIN(3, 0);
14725 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14726 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14727 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
14728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14730 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14731 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14732 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14733 IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
14734 IEM_MC_END();
14735 return VINF_SUCCESS;
14736}
14737
14738
14739/** Opcode 0xd9 !11/5 */
14740FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
14741{
14742 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
14743 IEM_MC_BEGIN(1, 1);
14744 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14745 IEM_MC_ARG(uint16_t, u16Fcw, 0);
14746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14748 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14749 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14750 IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14751 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fcw);
14752 IEM_MC_END();
14753 return VINF_SUCCESS;
14754}
14755
14756
14757/** Opcode 0xd9 !11/6 */
14758FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
14759{
14760 IEMOP_MNEMONIC(fnstenv, "fnstenv m14/m28byte");
14761 IEM_MC_BEGIN(3, 0);
14762 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14763 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14764 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
14765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14767 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14768 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14769 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14770 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
14771 IEM_MC_END();
14772 return VINF_SUCCESS;
14773}
14774
14775
14776/** Opcode 0xd9 !11/7 */
14777FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
14778{
14779 IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
14780 IEM_MC_BEGIN(2, 0);
14781 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14782 IEM_MC_LOCAL(uint16_t, u16Fcw);
14783 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14784 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14785 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14786 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14787 IEM_MC_FETCH_FCW(u16Fcw);
14788 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
14789 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined; we leave them unmodified. */
14790 IEM_MC_END();
14791 return VINF_SUCCESS;
14792}
14793
14794
14795/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
14796FNIEMOP_DEF(iemOp_fnop)
14797{
14798 IEMOP_MNEMONIC(fnop, "fnop");
14799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14800
14801 IEM_MC_BEGIN(0, 0);
14802 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14803 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14804 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14805 /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
14806 * an Intel optimization. Investigate. */
14807 IEM_MC_UPDATE_FPU_OPCODE_IP();
14808 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined; we leave them unmodified. */
14809 IEM_MC_END();
14810 return VINF_SUCCESS;
14811}
14812
14813
14814/** Opcode 0xd9 11/0 stN */
14815FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
14816{
14817 IEMOP_MNEMONIC(fld_stN, "fld stN");
14818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14819
14820 /** @todo Testcase: Check whether this raises \#MF. Intel's docs suggest it
14821 * doesn't; AMD's indicate that it does. */
14822 IEM_MC_BEGIN(0, 2);
14823 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14824 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14825 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14826 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14827
14828 IEM_MC_PREPARE_FPU_USAGE();
14829 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
14830 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14831 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14832 IEM_MC_ELSE()
14833 IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
14834 IEM_MC_ENDIF();
14835
14836 IEM_MC_ADVANCE_RIP();
14837 IEM_MC_END();
14838
14839 return VINF_SUCCESS;
14840}
14841
14842
14843/** Opcode 0xd9 11/3 stN */
14844FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
14845{
14846 IEMOP_MNEMONIC(fxch_stN, "fxch stN");
14847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14848
14849 /** @todo Testcase: Check whether this raises \#MF. Intel's docs suggest it
14850 * doesn't; AMD's indicate that it does. */
14851 IEM_MC_BEGIN(1, 3);
14852 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
14853 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
14854 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14855 IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
14856 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14857 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14858
14859 IEM_MC_PREPARE_FPU_USAGE();
14860 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14861 IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
14862 IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
14863 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14864 IEM_MC_ELSE()
14865 IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
14866 IEM_MC_ENDIF();
14867
14868 IEM_MC_ADVANCE_RIP();
14869 IEM_MC_END();
14870
14871 return VINF_SUCCESS;
14872}
14873
14874
14875/** Opcode 0xd9 11/4, 0xdd 11/2. */
14876FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
14877{
14878 IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
14879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14880
14881 /* fstp st0, st0 is frequently used as a documented stand-in for the undocumented 'ffreep st0'. */
14882 uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
14883 if (!iDstReg)
14884 {
14885 IEM_MC_BEGIN(0, 1);
14886 IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
14887 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14888 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14889
14890 IEM_MC_PREPARE_FPU_USAGE();
14891 IEM_MC_IF_FPUREG_NOT_EMPTY(0)
14892 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14893 IEM_MC_ELSE()
14894 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
14895 IEM_MC_ENDIF();
14896
14897 IEM_MC_ADVANCE_RIP();
14898 IEM_MC_END();
14899 }
14900 else
14901 {
14902 IEM_MC_BEGIN(0, 2);
14903 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14904 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14905 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14906 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14907
14908 IEM_MC_PREPARE_FPU_USAGE();
14909 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14910 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14911 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
14912 IEM_MC_ELSE()
14913 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
14914 IEM_MC_ENDIF();
14915
14916 IEM_MC_ADVANCE_RIP();
14917 IEM_MC_END();
14918 }
14919 return VINF_SUCCESS;
14920}
14921
14922
14923/**
14924 * Common worker for FPU instructions working on ST0 and replaces it with the
14925 * result, i.e. unary operators.
14926 *
14927 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14928 */
14929FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
14930{
14931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14932
14933 IEM_MC_BEGIN(2, 1);
14934 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14935 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14936 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14937
14938 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14939 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14940 IEM_MC_PREPARE_FPU_USAGE();
14941 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14942 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
14943 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14944 IEM_MC_ELSE()
14945 IEM_MC_FPU_STACK_UNDERFLOW(0);
14946 IEM_MC_ENDIF();
14947 IEM_MC_ADVANCE_RIP();
14948
14949 IEM_MC_END();
14950 return VINF_SUCCESS;
14951}
14952
14953
14954/** Opcode 0xd9 0xe0. */
14955FNIEMOP_DEF(iemOp_fchs)
14956{
14957 IEMOP_MNEMONIC(fchs_st0, "fchs st0");
14958 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
14959}
14960
14961
14962/** Opcode 0xd9 0xe1. */
14963FNIEMOP_DEF(iemOp_fabs)
14964{
14965 IEMOP_MNEMONIC(fabs_st0, "fabs st0");
14966 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
14967}
14968
14969
14970/**
14971 * Common worker for FPU instructions working on ST0 and only returns FSW.
14972 *
14973 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14974 */
14975FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
14976{
14977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14978
14979 IEM_MC_BEGIN(2, 1);
14980 IEM_MC_LOCAL(uint16_t, u16Fsw);
14981 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14982 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14983
14984 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14985 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14986 IEM_MC_PREPARE_FPU_USAGE();
14987 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14988 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
14989 IEM_MC_UPDATE_FSW(u16Fsw);
14990 IEM_MC_ELSE()
14991 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14992 IEM_MC_ENDIF();
14993 IEM_MC_ADVANCE_RIP();
14994
14995 IEM_MC_END();
14996 return VINF_SUCCESS;
14997}
14998
14999
15000/** Opcode 0xd9 0xe4. */
15001FNIEMOP_DEF(iemOp_ftst)
15002{
15003 IEMOP_MNEMONIC(ftst_st0, "ftst st0");
15004 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
15005}
15006
15007
15008/** Opcode 0xd9 0xe5. */
15009FNIEMOP_DEF(iemOp_fxam)
15010{
15011 IEMOP_MNEMONIC(fxam_st0, "fxam st0");
15012 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
15013}
15014
15015
15016/**
15017 * Common worker for FPU instructions pushing a constant onto the FPU stack.
15018 *
15019 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15020 */
15021FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
15022{
15023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15024
15025 IEM_MC_BEGIN(1, 1);
15026 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15027 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15028
15029 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15030 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15031 IEM_MC_PREPARE_FPU_USAGE();
15032 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15033 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
15034 IEM_MC_PUSH_FPU_RESULT(FpuRes);
15035 IEM_MC_ELSE()
15036 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
15037 IEM_MC_ENDIF();
15038 IEM_MC_ADVANCE_RIP();
15039
15040 IEM_MC_END();
15041 return VINF_SUCCESS;
15042}
15043
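/* Reference table for the constants the opcodes below push (each held
 * in full 80-bit precision):
 *     fld1   +1.0        fldl2t  log2(10)    fldl2e  log2(e)
 *     fldpi  pi          fldlg2  log10(2)    fldln2  ln(2)
 *     fldz   +0.0 */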
15044
15045/** Opcode 0xd9 0xe8. */
15046FNIEMOP_DEF(iemOp_fld1)
15047{
15048 IEMOP_MNEMONIC(fld1, "fld1");
15049 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
15050}
15051
15052
15053/** Opcode 0xd9 0xe9. */
15054FNIEMOP_DEF(iemOp_fldl2t)
15055{
15056 IEMOP_MNEMONIC(fldl2t, "fldl2t");
15057 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
15058}
15059
15060
15061/** Opcode 0xd9 0xea. */
15062FNIEMOP_DEF(iemOp_fldl2e)
15063{
15064 IEMOP_MNEMONIC(fldl2e, "fldl2e");
15065 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
15066}
15067
15068/** Opcode 0xd9 0xeb. */
15069FNIEMOP_DEF(iemOp_fldpi)
15070{
15071 IEMOP_MNEMONIC(fldpi, "fldpi");
15072 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
15073}
15074
15075
15076/** Opcode 0xd9 0xec. */
15077FNIEMOP_DEF(iemOp_fldlg2)
15078{
15079 IEMOP_MNEMONIC(fldlg2, "fldlg2");
15080 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
15081}
15082
15083/** Opcode 0xd9 0xed. */
15084FNIEMOP_DEF(iemOp_fldln2)
15085{
15086 IEMOP_MNEMONIC(fldln2, "fldln2");
15087 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
15088}
15089
15090
15091/** Opcode 0xd9 0xee. */
15092FNIEMOP_DEF(iemOp_fldz)
15093{
15094 IEMOP_MNEMONIC(fldz, "fldz");
15095 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
15096}
15097
15098
15099/** Opcode 0xd9 0xf0. */
15100FNIEMOP_DEF(iemOp_f2xm1)
15101{
15102 IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
15103 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
15104}
15105
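/* Reference note: F2XM1 computes ST0 = 2^ST0 - 1 and is only specified
 * for ST0 in the range [-1, +1]; outside it the result is undefined. */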
15106
15107/**
15108 * Common worker for FPU instructions working on STn and ST0, storing the result
15109 * in STn, and popping the stack unless IE, DE or ZE was raised.
15110 *
15111 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15112 */
15113FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15114{
15115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15116
15117 IEM_MC_BEGIN(3, 1);
15118 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15119 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15120 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15121 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15122
15123 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15124 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15125
15126 IEM_MC_PREPARE_FPU_USAGE();
15127 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15128 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15129 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
15130 IEM_MC_ELSE()
15131 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
15132 IEM_MC_ENDIF();
15133 IEM_MC_ADVANCE_RIP();
15134
15135 IEM_MC_END();
15136 return VINF_SUCCESS;
15137}
15138
15139
15140/** Opcode 0xd9 0xf1. */
15141FNIEMOP_DEF(iemOp_fyl2x)
15142{
15143 IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
15144 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
15145}
15146
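/* Reference note: FYL2X computes ST1 = ST1 * log2(ST0) and then pops,
 * which is why it is wired to the stN_st0_pop worker with an STn index
 * of 1. */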
15147
15148/**
15149 * Common worker for FPU instructions working on ST0 and having two outputs, one
15150 * replacing ST0 and one pushed onto the stack.
15151 *
15152 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15153 */
15154FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
15155{
15156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15157
15158 IEM_MC_BEGIN(2, 1);
15159 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
15160 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
15161 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
15162
15163 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15164 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15165 IEM_MC_PREPARE_FPU_USAGE();
15166 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15167 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
15168 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
15169 IEM_MC_ELSE()
15170 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
15171 IEM_MC_ENDIF();
15172 IEM_MC_ADVANCE_RIP();
15173
15174 IEM_MC_END();
15175 return VINF_SUCCESS;
15176}
15177
15178
15179/** Opcode 0xd9 0xf2. */
15180FNIEMOP_DEF(iemOp_fptan)
15181{
15182 IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
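    /* Splits ST0 into exponent and significand: the exponent ends up in ST1 and the pushed significand becomes the new ST0. */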
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}


/** Opcode 0xd9 0xf6. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xd9 0xf7. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xd9 0xf8. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
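    /* Partial remainder with a truncated quotient (legacy 8087 semantics); FPREM1 (0xd9 0xf5) is the IEEE 754 variant. */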
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
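    /* ST0 is replaced by sin(ST0) and cos(ST0) is pushed, leaving the cosine in ST0 and the sine in ST1. */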
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
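    /* ST0 = ST0 * 2^trunc(ST1); ST1 itself is left unchanged. */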
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}


/** Used by iemOp_EscF1. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};


/** Opcode 0xd9. */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
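    /* The FOP register holds the low eleven opcode bits: the ModRM byte in bits 0..7 and the low three bits of the 0xd8..0xdf escape byte above it. */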
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0xda 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda 11/1. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
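    /* Unordered comparisons (FUCOMI and friends) set PF, so FCMOVU keys off the parity flag. */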
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}


/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}


/** Opcode 0xda !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}


/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}


/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}


/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}


/** Opcode 0xda. */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0xdb !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
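    /* A push goes into ST(7) relative to the current TOP; that slot must be tagged empty or the load overflows the stack. */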
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
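    /* FISTTP (SSE3) always converts with truncation (round toward zero), ignoring the FCW rounding mode. */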
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
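        /* Stack underflow (ST0 empty): with the invalid-operation exception masked, the integer indefinite value is stored anyway. */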
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb !11/5. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb !11/7. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 11/1. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0xdb 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnu_st0_stN, "fcmovnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe0. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe1. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe2. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe3. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}


/** Opcode 0xdb 0xe4. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe5. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}


/** Opcode 0xdb 11/5. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}


/** Opcode 0xdb 11/6. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}


/** Opcode 0xdb. */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnu_stN, bRm);
            case 4:
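                /* The /4 register forms encode legacy 8087/80287 control instructions; on later CPUs they are ignored or invalid. */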
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}

16303/**
16304 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
16305 * memory operand, and storing the result in ST0.
16306 *
16307 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16308 */
16309FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
16310{
16311 IEM_MC_BEGIN(3, 3);
16312 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16313 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16314 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
16315 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16316 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
16317 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
16318
16319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16321 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16322 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16323
16324 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16325 IEM_MC_PREPARE_FPU_USAGE();
16326 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
16327 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
16328 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16329 IEM_MC_ELSE()
16330 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16331 IEM_MC_ENDIF();
16332 IEM_MC_ADVANCE_RIP();
16333
16334 IEM_MC_END();
16335 return VINF_SUCCESS;
16336}
16337
16338
16339/** Opcode 0xdc !11/0. */
16340FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
16341{
16342 IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
16343 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
16344}
16345
16346
16347/** Opcode 0xdc !11/1. */
16348FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
16349{
16350 IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
16351 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
16352}
16353
16354
16355/** Opcode 0xdc !11/2. */
16356FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
16357{
16358 IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
16359
16360 IEM_MC_BEGIN(3, 3);
16361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16362 IEM_MC_LOCAL(uint16_t, u16Fsw);
16363 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
16364 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16365 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16366 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
16367
16368 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16370
16371 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16372 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16373 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16374
16375 IEM_MC_PREPARE_FPU_USAGE();
16376 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16377 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
16378 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16379 IEM_MC_ELSE()
16380 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16381 IEM_MC_ENDIF();
16382 IEM_MC_ADVANCE_RIP();
16383
16384 IEM_MC_END();
16385 return VINF_SUCCESS;
16386}
16387
16388
16389/** Opcode 0xdc !11/3. */
16390FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
16391{
16392 IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
16393
16394 IEM_MC_BEGIN(3, 3);
16395 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16396 IEM_MC_LOCAL(uint16_t, u16Fsw);
16397 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
16398 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16399 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16400 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
16401
16402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16404
16405 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16406 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16407 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16408
16409 IEM_MC_PREPARE_FPU_USAGE();
16410 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16411 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
16412 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16413 IEM_MC_ELSE()
16414 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16415 IEM_MC_ENDIF();
16416 IEM_MC_ADVANCE_RIP();
16417
16418 IEM_MC_END();
16419 return VINF_SUCCESS;
16420}
16421
16422
16423/** Opcode 0xdc !11/4. */
16424FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
16425{
16426 IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
16427 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
16428}
16429
16430
16431/** Opcode 0xdc !11/5. */
16432FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
16433{
16434 IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
16435 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
16436}
16437
16438
16439/** Opcode 0xdc !11/6. */
16440FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
16441{
16442 IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
16443 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
16444}
16445
16446
16447/** Opcode 0xdc !11/7. */
16448FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
16449{
16450 IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
16451 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
16452}
16453
16454
16455/** Opcode 0xdc. */
16456FNIEMOP_DEF(iemOp_EscF4)
16457{
16458 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16459 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
16460 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16461 {
16462 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16463 {
16464 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
16465 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
16466 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
16467 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
16468 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
16469 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
16470 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
16471 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
16472 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16473 }
16474 }
16475 else
16476 {
16477 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16478 {
16479 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
16480 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
16481 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
16482 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
16483 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
16484 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
16485 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
16486 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
16487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16488 }
16489 }
16490}
16491
16492
/** Opcode 0xdd !11/0.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0xdd !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0xdd !11/2. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0xdd !11/3. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0xdd !11/4. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0xdd !11/6. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0xdd !11/7. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
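    /* This is the no-wait form: FSW is stored without first checking for pending unmasked FPU exceptions. */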

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

    /** @todo Debug / drop a hint to the verifier that things may differ
     *        from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
     *        NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdd 11/0. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
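    /* FFREE only tags ST(i) as empty; TOP and the register contents are left alone. */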
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0xdd 11/2. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}

/** Opcode 0xdd 11/4. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}

/** Opcode 0xdd 11/5. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}


/** Opcode 0xdd. */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of FXCH ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}

/** Opcode 0xde 11/1. */
16812FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
16813{
16814 IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
16815 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
16816}
16817
16818
16819/** Opcode 0xde 0xd9. */
16820FNIEMOP_DEF(iemOp_fcompp)
16821{
16822 IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
16823 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
16824}
16825
16826
16827/** Opcode 0xde 11/4. */
16828FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
16829{
16830 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
16831 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
16832}
16833
16834
16835/** Opcode 0xde 11/5. */
16836FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
16837{
16838 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
16839 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
16840}
16841
16842
16843/** Opcode 0xde 11/6. */
16844FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
16845{
16846 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
16847 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
16848}
16849
16850
16851/** Opcode 0xde 11/7. */
16852FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
16853{
16854 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
16855 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
16856}
16857
16858
16859/**
16860 * Common worker for FPU instructions working on ST0 and an m16i, and storing
16861 * the result in ST0.
16862 *
16863 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16864 */
16865FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
16866{
16867 IEM_MC_BEGIN(3, 3);
16868 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16869 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16870 IEM_MC_LOCAL(int16_t, i16Val2);
16871 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16872 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16873 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16874
16875 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16877
16878 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16879 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16880 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16881
16882 IEM_MC_PREPARE_FPU_USAGE();
16883 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16884 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
16885 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
16886 IEM_MC_ELSE()
16887 IEM_MC_FPU_STACK_UNDERFLOW(0);
16888 IEM_MC_ENDIF();
16889 IEM_MC_ADVANCE_RIP();
16890
16891 IEM_MC_END();
16892 return VINF_SUCCESS;
16893}
16894
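/*
 * Usage sketch (informational): the 0xde memory forms below each bind this
 * worker to the matching assembly helper, e.g.
 *
 *     return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
 *
 * where the PFNIEMAIMPLFPUI16 helper receives the result buffer, ST0 and the
 * fetched 16-bit integer.
 */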
16895
16896/** Opcode 0xde !11/0. */
16897FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
16898{
16899 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
16900 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
16901}
16902
16903
16904/** Opcode 0xde !11/1. */
16905FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
16906{
16907 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
16908 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
16909}
16910
16911
16912/** Opcode 0xde !11/2. */
16913FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
16914{
16915 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
16916
16917 IEM_MC_BEGIN(3, 3);
16918 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16919 IEM_MC_LOCAL(uint16_t, u16Fsw);
16920 IEM_MC_LOCAL(int16_t, i16Val2);
16921 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16922 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16923 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16924
16925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16927
16928 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16929 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16930 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16931
16932 IEM_MC_PREPARE_FPU_USAGE();
16933 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16934 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16935 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16936 IEM_MC_ELSE()
16937 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16938 IEM_MC_ENDIF();
16939 IEM_MC_ADVANCE_RIP();
16940
16941 IEM_MC_END();
16942 return VINF_SUCCESS;
16943}
16944
16945
16946/** Opcode 0xde !11/3. */
16947FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
16948{
16949 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
16950
16951 IEM_MC_BEGIN(3, 3);
16952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16953 IEM_MC_LOCAL(uint16_t, u16Fsw);
16954 IEM_MC_LOCAL(int16_t, i16Val2);
16955 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16956 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16957 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16958
16959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16961
16962 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16963 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16964 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16965
16966 IEM_MC_PREPARE_FPU_USAGE();
16967 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16968 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16969 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16970 IEM_MC_ELSE()
16971 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16972 IEM_MC_ENDIF();
16973 IEM_MC_ADVANCE_RIP();
16974
16975 IEM_MC_END();
16976 return VINF_SUCCESS;
16977}
16978
16979
16980/** Opcode 0xde !11/4. */
16981FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
16982{
16983 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
16984 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
16985}
16986
16987
16988/** Opcode 0xde !11/5. */
16989FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
16990{
16991 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
16992 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
16993}
16994
16995
16996/** Opcode 0xde !11/6. */
16997FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
16998{
16999 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
17000 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
17001}
17002
17003
17004/** Opcode 0xde !11/7. */
17005FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
17006{
17007 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
17008 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
17009}
17010
17011
17012/** Opcode 0xde. */
17013FNIEMOP_DEF(iemOp_EscF6)
17014{
17015 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17016 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
17017 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17018 {
17019 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17020 {
17021 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
17022 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
17023 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
17024 case 3: if (bRm == 0xd9)
17025 return FNIEMOP_CALL(iemOp_fcompp);
17026 return IEMOP_RAISE_INVALID_OPCODE();
17027 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
17028 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
17029 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
17030 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
17031 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17032 }
17033 }
17034 else
17035 {
17036 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17037 {
17038 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
17039 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
17040 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
17041 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
17042 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
17043 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
17044 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
17045 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
17046 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17047 }
17048 }
17049}
17050
17051
17052/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
17054FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
17055{
17056 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
17057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17058
17059 IEM_MC_BEGIN(0, 0);
17060
17061 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17062 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17063
17064 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
17065 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
17066 IEM_MC_FPU_STACK_INC_TOP();
17067 IEM_MC_UPDATE_FPU_OPCODE_IP();
17068
17069 IEM_MC_ADVANCE_RIP();
17070 IEM_MC_END();
17071 return VINF_SUCCESS;
17072}
17073
17074
17075/** Opcode 0xdf 0xe0. */
17076FNIEMOP_DEF(iemOp_fnstsw_ax)
17077{
17078 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
17079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17080
17081 IEM_MC_BEGIN(0, 1);
17082 IEM_MC_LOCAL(uint16_t, u16Tmp);
17083 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17084 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
17085 IEM_MC_FETCH_FSW(u16Tmp);
17086 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
17087 IEM_MC_ADVANCE_RIP();
17088 IEM_MC_END();
17089 return VINF_SUCCESS;
17090}
17091
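/* Note: FNSTSW is a no-wait form; the function above therefore only checks
   for device-not-available (CR0.TS/EM) and deliberately skips the pending
   FPU exception check used by the arithmetic instructions. */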
17092
17093/** Opcode 0xdf 11/5. */
17094FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
17095{
17096 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
17097 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
17098}
17099
17100
17101/** Opcode 0xdf 11/6. */
17102FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
17103{
17104 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
17105 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
17106}
17107
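/* Note: FUCOMIP and FCOMIP above both defer to the same fcomi worker; the
   relaxed quiet-NaN behavior that distinguishes the unordered variant is
   apparently not modelled separately here. */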
17108
17109/** Opcode 0xdf !11/0. */
17110FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
17111{
17112 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
17113
17114 IEM_MC_BEGIN(2, 3);
17115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17116 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
17117 IEM_MC_LOCAL(int16_t, i16Val);
17118 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
17119 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
17120
17121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17123
17124 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17125 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17126 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17127
17128 IEM_MC_PREPARE_FPU_USAGE();
17129 IEM_MC_IF_FPUREG_IS_EMPTY(7)
17130 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
17131 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17132 IEM_MC_ELSE()
17133 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17134 IEM_MC_ENDIF();
17135 IEM_MC_ADVANCE_RIP();
17136
17137 IEM_MC_END();
17138 return VINF_SUCCESS;
17139}
17140
17141
17142/** Opcode 0xdf !11/1. */
17143FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
17144{
17145 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
17146 IEM_MC_BEGIN(3, 2);
17147 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17148 IEM_MC_LOCAL(uint16_t, u16Fsw);
17149 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17150 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17151 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17152
17153 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17155 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17156 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17157
17158 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17159 IEM_MC_PREPARE_FPU_USAGE();
17160 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17161 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17162 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17163 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17164 IEM_MC_ELSE()
17165 IEM_MC_IF_FCW_IM()
17166 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17167 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17168 IEM_MC_ENDIF();
17169 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17170 IEM_MC_ENDIF();
17171 IEM_MC_ADVANCE_RIP();
17172
17173 IEM_MC_END();
17174 return VINF_SUCCESS;
17175}
17176
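/* The store pattern above repeats for FIST/FISTP below: the destination is
   mapped first; on stack underflow with the invalid-operation exception
   masked (FCW.IM) the 'integer indefinite' value is stored instead, and the
   underflow is recorded in FSW. */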
17177
17178/** Opcode 0xdf !11/2. */
17179FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
17180{
17181 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
17182 IEM_MC_BEGIN(3, 2);
17183 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17184 IEM_MC_LOCAL(uint16_t, u16Fsw);
17185 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17186 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17187 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17188
17189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17191 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17192 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17193
17194 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17195 IEM_MC_PREPARE_FPU_USAGE();
17196 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17197 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17198 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17199 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17200 IEM_MC_ELSE()
17201 IEM_MC_IF_FCW_IM()
17202 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17203 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17204 IEM_MC_ENDIF();
17205 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17206 IEM_MC_ENDIF();
17207 IEM_MC_ADVANCE_RIP();
17208
17209 IEM_MC_END();
17210 return VINF_SUCCESS;
17211}
17212
17213
17214/** Opcode 0xdf !11/3. */
17215FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
17216{
17217 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
17218 IEM_MC_BEGIN(3, 2);
17219 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17220 IEM_MC_LOCAL(uint16_t, u16Fsw);
17221 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17222 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17223 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17224
17225 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17227 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17228 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17229
17230 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17231 IEM_MC_PREPARE_FPU_USAGE();
17232 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17233 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17234 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17235 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17236 IEM_MC_ELSE()
17237 IEM_MC_IF_FCW_IM()
17238 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17239 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17240 IEM_MC_ENDIF();
17241 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17242 IEM_MC_ENDIF();
17243 IEM_MC_ADVANCE_RIP();
17244
17245 IEM_MC_END();
17246 return VINF_SUCCESS;
17247}
17248
17249
17250/** Opcode 0xdf !11/4. */
17251FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
17252
17253
17254/** Opcode 0xdf !11/5. */
17255FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
17256{
17257 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
17258
17259 IEM_MC_BEGIN(2, 3);
17260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17261 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
17262 IEM_MC_LOCAL(int64_t, i64Val);
17263 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
17264 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
17265
17266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17268
17269 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17270 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17271 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17272
17273 IEM_MC_PREPARE_FPU_USAGE();
17274 IEM_MC_IF_FPUREG_IS_EMPTY(7)
17275 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
17276 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17277 IEM_MC_ELSE()
17278 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17279 IEM_MC_ENDIF();
17280 IEM_MC_ADVANCE_RIP();
17281
17282 IEM_MC_END();
17283 return VINF_SUCCESS;
17284}
17285
17286
17287/** Opcode 0xdf !11/6. */
17288FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
17289
17290
17291/** Opcode 0xdf !11/7. */
17292FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
17293{
17294 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
17295 IEM_MC_BEGIN(3, 2);
17296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17297 IEM_MC_LOCAL(uint16_t, u16Fsw);
17298 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17299 IEM_MC_ARG(int64_t *, pi64Dst, 1);
17300 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17301
17302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17304 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17305 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17306
17307 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17308 IEM_MC_PREPARE_FPU_USAGE();
17309 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17310 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
17311 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
17312 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17313 IEM_MC_ELSE()
17314 IEM_MC_IF_FCW_IM()
17315 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
17316 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
17317 IEM_MC_ENDIF();
17318 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17319 IEM_MC_ENDIF();
17320 IEM_MC_ADVANCE_RIP();
17321
17322 IEM_MC_END();
17323 return VINF_SUCCESS;
17324}
17325
17326
17327/** Opcode 0xdf. */
17328FNIEMOP_DEF(iemOp_EscF7)
17329{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
17331 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17332 {
17333 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17334 {
17335 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
17336 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
17337 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
17338 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
17339 case 4: if (bRm == 0xe0)
17340 return FNIEMOP_CALL(iemOp_fnstsw_ax);
17341 return IEMOP_RAISE_INVALID_OPCODE();
17342 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
17343 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
17344 case 7: return IEMOP_RAISE_INVALID_OPCODE();
17345 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17346 }
17347 }
17348 else
17349 {
17350 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17351 {
17352 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
17353 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
17354 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
17355 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
17356 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
17357 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
17358 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
17359 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
17360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17361 }
17362 }
17363}
17364
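/*
 * LOOPcc/LOOP/JrCXZ below select the counter register (CX/ECX/RCX) from the
 * effective address size, not the operand size, and the decrement leaves
 * EFLAGS untouched.
 */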
17365
17366/** Opcode 0xe0. */
17367FNIEMOP_DEF(iemOp_loopne_Jb)
17368{
17369 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
17370 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17372 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17373
17374 switch (pVCpu->iem.s.enmEffAddrMode)
17375 {
17376 case IEMMODE_16BIT:
17377 IEM_MC_BEGIN(0,0);
17378 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17379 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17380 IEM_MC_REL_JMP_S8(i8Imm);
17381 } IEM_MC_ELSE() {
17382 IEM_MC_ADVANCE_RIP();
17383 } IEM_MC_ENDIF();
17384 IEM_MC_END();
17385 return VINF_SUCCESS;
17386
17387 case IEMMODE_32BIT:
17388 IEM_MC_BEGIN(0,0);
17389 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17390 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17391 IEM_MC_REL_JMP_S8(i8Imm);
17392 } IEM_MC_ELSE() {
17393 IEM_MC_ADVANCE_RIP();
17394 } IEM_MC_ENDIF();
17395 IEM_MC_END();
17396 return VINF_SUCCESS;
17397
17398 case IEMMODE_64BIT:
17399 IEM_MC_BEGIN(0,0);
17400 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17401 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17402 IEM_MC_REL_JMP_S8(i8Imm);
17403 } IEM_MC_ELSE() {
17404 IEM_MC_ADVANCE_RIP();
17405 } IEM_MC_ENDIF();
17406 IEM_MC_END();
17407 return VINF_SUCCESS;
17408
17409 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17410 }
17411}
17412
17413
17414/** Opcode 0xe1. */
17415FNIEMOP_DEF(iemOp_loope_Jb)
17416{
17417 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
17418 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17420 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17421
17422 switch (pVCpu->iem.s.enmEffAddrMode)
17423 {
17424 case IEMMODE_16BIT:
17425 IEM_MC_BEGIN(0,0);
17426 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17427 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17428 IEM_MC_REL_JMP_S8(i8Imm);
17429 } IEM_MC_ELSE() {
17430 IEM_MC_ADVANCE_RIP();
17431 } IEM_MC_ENDIF();
17432 IEM_MC_END();
17433 return VINF_SUCCESS;
17434
17435 case IEMMODE_32BIT:
17436 IEM_MC_BEGIN(0,0);
17437 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17438 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17439 IEM_MC_REL_JMP_S8(i8Imm);
17440 } IEM_MC_ELSE() {
17441 IEM_MC_ADVANCE_RIP();
17442 } IEM_MC_ENDIF();
17443 IEM_MC_END();
17444 return VINF_SUCCESS;
17445
17446 case IEMMODE_64BIT:
17447 IEM_MC_BEGIN(0,0);
17448 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17449 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17450 IEM_MC_REL_JMP_S8(i8Imm);
17451 } IEM_MC_ELSE() {
17452 IEM_MC_ADVANCE_RIP();
17453 } IEM_MC_ENDIF();
17454 IEM_MC_END();
17455 return VINF_SUCCESS;
17456
17457 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17458 }
17459}
17460
17461
17462/** Opcode 0xe2. */
17463FNIEMOP_DEF(iemOp_loop_Jb)
17464{
17465 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
17466 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17468 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17469
17470 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
17471 * using the 32-bit operand size override. How can that be restarted? See
17472 * weird pseudo code in intel manual. */
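    /* A 'loop $' (branch to itself) would otherwise just spin the counter
       down; all three cases below short-circuit that encoding by clearing
       the counter and advancing, which is safe since LOOP leaves EFLAGS
       alone. */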
17473 switch (pVCpu->iem.s.enmEffAddrMode)
17474 {
17475 case IEMMODE_16BIT:
17476 IEM_MC_BEGIN(0,0);
17477 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17478 {
17479 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17480 IEM_MC_IF_CX_IS_NZ() {
17481 IEM_MC_REL_JMP_S8(i8Imm);
17482 } IEM_MC_ELSE() {
17483 IEM_MC_ADVANCE_RIP();
17484 } IEM_MC_ENDIF();
17485 }
17486 else
17487 {
17488 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
17489 IEM_MC_ADVANCE_RIP();
17490 }
17491 IEM_MC_END();
17492 return VINF_SUCCESS;
17493
17494 case IEMMODE_32BIT:
17495 IEM_MC_BEGIN(0,0);
17496 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17497 {
17498 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17499 IEM_MC_IF_ECX_IS_NZ() {
17500 IEM_MC_REL_JMP_S8(i8Imm);
17501 } IEM_MC_ELSE() {
17502 IEM_MC_ADVANCE_RIP();
17503 } IEM_MC_ENDIF();
17504 }
17505 else
17506 {
17507 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
17508 IEM_MC_ADVANCE_RIP();
17509 }
17510 IEM_MC_END();
17511 return VINF_SUCCESS;
17512
17513 case IEMMODE_64BIT:
17514 IEM_MC_BEGIN(0,0);
17515 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17516 {
17517 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17518 IEM_MC_IF_RCX_IS_NZ() {
17519 IEM_MC_REL_JMP_S8(i8Imm);
17520 } IEM_MC_ELSE() {
17521 IEM_MC_ADVANCE_RIP();
17522 } IEM_MC_ENDIF();
17523 }
17524 else
17525 {
17526 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
17527 IEM_MC_ADVANCE_RIP();
17528 }
17529 IEM_MC_END();
17530 return VINF_SUCCESS;
17531
17532 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17533 }
17534}
17535
17536
17537/** Opcode 0xe3. */
17538FNIEMOP_DEF(iemOp_jecxz_Jb)
17539{
17540 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
17541 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17543 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17544
17545 switch (pVCpu->iem.s.enmEffAddrMode)
17546 {
17547 case IEMMODE_16BIT:
17548 IEM_MC_BEGIN(0,0);
17549 IEM_MC_IF_CX_IS_NZ() {
17550 IEM_MC_ADVANCE_RIP();
17551 } IEM_MC_ELSE() {
17552 IEM_MC_REL_JMP_S8(i8Imm);
17553 } IEM_MC_ENDIF();
17554 IEM_MC_END();
17555 return VINF_SUCCESS;
17556
17557 case IEMMODE_32BIT:
17558 IEM_MC_BEGIN(0,0);
17559 IEM_MC_IF_ECX_IS_NZ() {
17560 IEM_MC_ADVANCE_RIP();
17561 } IEM_MC_ELSE() {
17562 IEM_MC_REL_JMP_S8(i8Imm);
17563 } IEM_MC_ENDIF();
17564 IEM_MC_END();
17565 return VINF_SUCCESS;
17566
17567 case IEMMODE_64BIT:
17568 IEM_MC_BEGIN(0,0);
17569 IEM_MC_IF_RCX_IS_NZ() {
17570 IEM_MC_ADVANCE_RIP();
17571 } IEM_MC_ELSE() {
17572 IEM_MC_REL_JMP_S8(i8Imm);
17573 } IEM_MC_ENDIF();
17574 IEM_MC_END();
17575 return VINF_SUCCESS;
17576
17577 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17578 }
17579}
17580
17581
17582/** Opcode 0xe4 */
17583FNIEMOP_DEF(iemOp_in_AL_Ib)
17584{
17585 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
17586 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17588 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
17589}
17590
17591
17592/** Opcode 0xe5 */
17593FNIEMOP_DEF(iemOp_in_eAX_Ib)
17594{
17595 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
17596 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17598 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17599}
17600
17601
17602/** Opcode 0xe6 */
17603FNIEMOP_DEF(iemOp_out_Ib_AL)
17604{
17605 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
17606 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17608 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
17609}
17610
17611
17612/** Opcode 0xe7 */
17613FNIEMOP_DEF(iemOp_out_Ib_eAX)
17614{
17615 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
17616 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17618 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17619}
17620
17621
17622/** Opcode 0xe8. */
17623FNIEMOP_DEF(iemOp_call_Jv)
17624{
17625 IEMOP_MNEMONIC(call_Jv, "call Jv");
17626 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17627 switch (pVCpu->iem.s.enmEffOpSize)
17628 {
17629 case IEMMODE_16BIT:
17630 {
17631 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17632 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
17633 }
17634
17635 case IEMMODE_32BIT:
17636 {
17637 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17638 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
17639 }
17640
17641 case IEMMODE_64BIT:
17642 {
17643 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17644 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
17645 }
17646
17647 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17648 }
17649}
17650
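/* Note: there is no 64-bit immediate form of CALL rel; the 64-bit case above
   fetches a sign-extended 32-bit displacement (GET_NEXT_S32_SX_U64). */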
17651
17652/** Opcode 0xe9. */
17653FNIEMOP_DEF(iemOp_jmp_Jv)
17654{
17655 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
17656 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17657 switch (pVCpu->iem.s.enmEffOpSize)
17658 {
17659 case IEMMODE_16BIT:
17660 {
17661 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
17662 IEM_MC_BEGIN(0, 0);
17663 IEM_MC_REL_JMP_S16(i16Imm);
17664 IEM_MC_END();
17665 return VINF_SUCCESS;
17666 }
17667
17668 case IEMMODE_64BIT:
17669 case IEMMODE_32BIT:
17670 {
17671 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
17672 IEM_MC_BEGIN(0, 0);
17673 IEM_MC_REL_JMP_S32(i32Imm);
17674 IEM_MC_END();
17675 return VINF_SUCCESS;
17676 }
17677
17678 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17679 }
17680}
17681
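/* JMP rel likewise has no 64-bit displacement: the IEMMODE_64BIT case above
   shares the 32-bit path and jumps by a sign-extended 32-bit value. */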
17682
17683/** Opcode 0xea. */
17684FNIEMOP_DEF(iemOp_jmp_Ap)
17685{
17686 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
17687 IEMOP_HLP_NO_64BIT();
17688
17689 /* Decode the far pointer address and pass it on to the far call C implementation. */
17690 uint32_t offSeg;
17691 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
17692 IEM_OPCODE_GET_NEXT_U32(&offSeg);
17693 else
17694 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
17695 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
17696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17697 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
17698}
17699
17700
17701/** Opcode 0xeb. */
17702FNIEMOP_DEF(iemOp_jmp_Jb)
17703{
17704 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
17705 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17707 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17708
17709 IEM_MC_BEGIN(0, 0);
17710 IEM_MC_REL_JMP_S8(i8Imm);
17711 IEM_MC_END();
17712 return VINF_SUCCESS;
17713}
17714
17715
17716/** Opcode 0xec */
17717FNIEMOP_DEF(iemOp_in_AL_DX)
17718{
17719 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
17720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17721 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
17722}
17723
17724
17725/** Opcode 0xed */
17726FNIEMOP_DEF(iemOp_eAX_DX)
17727{
17728 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
17729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17730 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17731}
17732
17733
17734/** Opcode 0xee */
17735FNIEMOP_DEF(iemOp_out_DX_AL)
17736{
17737 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
17738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17739 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
17740}
17741
17742
17743/** Opcode 0xef */
17744FNIEMOP_DEF(iemOp_out_DX_eAX)
17745{
17746 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
17747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17748 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17749}
17750
17751
17752/** Opcode 0xf0. */
17753FNIEMOP_DEF(iemOp_lock)
17754{
17755 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
17756 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
17757
17758 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17759 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17760}
17761
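/* Prefix bytes like LOCK above just record themselves in fPrefixes and
   recurse into the one-byte map for the next byte; instructions that do not
   accept LOCK fault later via IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX. */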
17762
17763/** Opcode 0xf1. */
17764FNIEMOP_DEF(iemOp_int_1)
17765{
17766 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
17767 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
17768 /** @todo testcase! */
17769 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
17770}
17771
17772
17773/** Opcode 0xf2. */
17774FNIEMOP_DEF(iemOp_repne)
17775{
17776 /* This overrides any previous REPE prefix. */
17777 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
17778 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
17779 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
17780
17781 /* For the 4 entry opcode tables, REPNZ overrides any previous
17782 REPZ and operand size prefixes. */
17783 pVCpu->iem.s.idxPrefix = 3;
17784
17785 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17786 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17787}
17788
17789
17790/** Opcode 0xf3. */
17791FNIEMOP_DEF(iemOp_repe)
17792{
17793 /* This overrides any previous REPNE prefix. */
17794 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
17795 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
17796 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
17797
    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
17800 pVCpu->iem.s.idxPrefix = 2;
17801
17802 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17803 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17804}
17805
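/* Assumed layout of the 4-entry opcode table index set above: 0 = no prefix,
   1 = 0x66, 2 = 0xf3 (REPZ), 3 = 0xf2 (REPNZ). */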
17806
17807/** Opcode 0xf4. */
17808FNIEMOP_DEF(iemOp_hlt)
17809{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17811 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
17812}
17813
17814
17815/** Opcode 0xf5. */
17816FNIEMOP_DEF(iemOp_cmc)
17817{
17818 IEMOP_MNEMONIC(cmc, "cmc");
17819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17820 IEM_MC_BEGIN(0, 0);
17821 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
17822 IEM_MC_ADVANCE_RIP();
17823 IEM_MC_END();
17824 return VINF_SUCCESS;
17825}
17826
17827
17828/**
17829 * Common implementation of 'inc/dec/not/neg Eb'.
17830 *
17831 * @param bRm The RM byte.
17832 * @param pImpl The instruction implementation.
17833 */
17834FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17835{
17836 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17837 {
17838 /* register access */
17839 IEM_MC_BEGIN(2, 0);
17840 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17841 IEM_MC_ARG(uint32_t *, pEFlags, 1);
17842 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17843 IEM_MC_REF_EFLAGS(pEFlags);
17844 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17845 IEM_MC_ADVANCE_RIP();
17846 IEM_MC_END();
17847 }
17848 else
17849 {
17850 /* memory access. */
17851 IEM_MC_BEGIN(2, 2);
17852 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17853 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17854 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17855
17856 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17857 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17858 IEM_MC_FETCH_EFLAGS(EFlags);
17859 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17860 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17861 else
17862 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
17863
17864 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
17865 IEM_MC_COMMIT_EFLAGS(EFlags);
17866 IEM_MC_ADVANCE_RIP();
17867 IEM_MC_END();
17868 }
17869 return VINF_SUCCESS;
17870}
17871
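/*
 * Usage sketch (informational): Grp3 (0xf6 /2 NOT, /3 NEG) and Grp4 (0xfe
 * /0 INC, /1 DEC) bind the worker above to an implementation table, e.g.
 *
 *     return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
 */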
17872
17873/**
17874 * Common implementation of 'inc/dec/not/neg Ev'.
17875 *
17876 * @param bRm The RM byte.
17877 * @param pImpl The instruction implementation.
17878 */
17879FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17880{
17881 /* Registers are handled by a common worker. */
17882 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17883 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17884
17885 /* Memory we do here. */
17886 switch (pVCpu->iem.s.enmEffOpSize)
17887 {
17888 case IEMMODE_16BIT:
17889 IEM_MC_BEGIN(2, 2);
17890 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17891 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17893
17894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17895 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17896 IEM_MC_FETCH_EFLAGS(EFlags);
17897 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17898 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
17899 else
17900 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
17901
17902 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
17903 IEM_MC_COMMIT_EFLAGS(EFlags);
17904 IEM_MC_ADVANCE_RIP();
17905 IEM_MC_END();
17906 return VINF_SUCCESS;
17907
17908 case IEMMODE_32BIT:
17909 IEM_MC_BEGIN(2, 2);
17910 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17911 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17912 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17913
17914 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17915 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17916 IEM_MC_FETCH_EFLAGS(EFlags);
17917 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17918 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
17919 else
17920 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
17921
17922 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
17923 IEM_MC_COMMIT_EFLAGS(EFlags);
17924 IEM_MC_ADVANCE_RIP();
17925 IEM_MC_END();
17926 return VINF_SUCCESS;
17927
17928 case IEMMODE_64BIT:
17929 IEM_MC_BEGIN(2, 2);
17930 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17931 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17932 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17933
17934 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17935 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17936 IEM_MC_FETCH_EFLAGS(EFlags);
17937 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17938 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
17939 else
17940 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
17941
17942 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
17943 IEM_MC_COMMIT_EFLAGS(EFlags);
17944 IEM_MC_ADVANCE_RIP();
17945 IEM_MC_END();
17946 return VINF_SUCCESS;
17947
17948 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17949 }
17950}
17951
17952
17953/** Opcode 0xf6 /0. */
17954FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
17955{
17956 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
17957 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17958
17959 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17960 {
17961 /* register access */
17962 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17964
17965 IEM_MC_BEGIN(3, 0);
17966 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17967 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
17968 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17969 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17970 IEM_MC_REF_EFLAGS(pEFlags);
17971 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17972 IEM_MC_ADVANCE_RIP();
17973 IEM_MC_END();
17974 }
17975 else
17976 {
17977 /* memory access. */
17978 IEM_MC_BEGIN(3, 2);
17979 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17980 IEM_MC_ARG(uint8_t, u8Src, 1);
17981 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17982 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17983
17984 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
17985 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17986 IEM_MC_ASSIGN(u8Src, u8Imm);
17987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17988 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17989 IEM_MC_FETCH_EFLAGS(EFlags);
17990 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17991
17992 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
17993 IEM_MC_COMMIT_EFLAGS(EFlags);
17994 IEM_MC_ADVANCE_RIP();
17995 IEM_MC_END();
17996 }
17997 return VINF_SUCCESS;
17998}
17999
18000
18001/** Opcode 0xf7 /0. */
18002FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
18003{
18004 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
18005 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
18006
18007 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18008 {
18009 /* register access */
18010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18011 switch (pVCpu->iem.s.enmEffOpSize)
18012 {
18013 case IEMMODE_16BIT:
18014 {
18015 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
18016 IEM_MC_BEGIN(3, 0);
18017 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
18018 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
18019 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18020 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18021 IEM_MC_REF_EFLAGS(pEFlags);
18022 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
18023 IEM_MC_ADVANCE_RIP();
18024 IEM_MC_END();
18025 return VINF_SUCCESS;
18026 }
18027
18028 case IEMMODE_32BIT:
18029 {
18030 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
18031 IEM_MC_BEGIN(3, 0);
18032 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18033 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
18034 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18035 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18036 IEM_MC_REF_EFLAGS(pEFlags);
18037 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
18038 /* No clearing the high dword here - test doesn't write back the result. */
18039 IEM_MC_ADVANCE_RIP();
18040 IEM_MC_END();
18041 return VINF_SUCCESS;
18042 }
18043
18044 case IEMMODE_64BIT:
18045 {
18046 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
18047 IEM_MC_BEGIN(3, 0);
18048 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18049 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
18050 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18051 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18052 IEM_MC_REF_EFLAGS(pEFlags);
18053 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
18054 IEM_MC_ADVANCE_RIP();
18055 IEM_MC_END();
18056 return VINF_SUCCESS;
18057 }
18058
18059 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18060 }
18061 }
18062 else
18063 {
18064 /* memory access. */
18065 switch (pVCpu->iem.s.enmEffOpSize)
18066 {
18067 case IEMMODE_16BIT:
18068 {
18069 IEM_MC_BEGIN(3, 2);
18070 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
18071 IEM_MC_ARG(uint16_t, u16Src, 1);
18072 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18073 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18074
18075 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
18076 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
18077 IEM_MC_ASSIGN(u16Src, u16Imm);
18078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18079 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18080 IEM_MC_FETCH_EFLAGS(EFlags);
18081 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
18082
18083 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
18084 IEM_MC_COMMIT_EFLAGS(EFlags);
18085 IEM_MC_ADVANCE_RIP();
18086 IEM_MC_END();
18087 return VINF_SUCCESS;
18088 }
18089
18090 case IEMMODE_32BIT:
18091 {
18092 IEM_MC_BEGIN(3, 2);
18093 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18094 IEM_MC_ARG(uint32_t, u32Src, 1);
18095 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18096 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18097
18098 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18099 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
18100 IEM_MC_ASSIGN(u32Src, u32Imm);
18101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18102 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18103 IEM_MC_FETCH_EFLAGS(EFlags);
18104 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
18105
18106 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
18107 IEM_MC_COMMIT_EFLAGS(EFlags);
18108 IEM_MC_ADVANCE_RIP();
18109 IEM_MC_END();
18110 return VINF_SUCCESS;
18111 }
18112
18113 case IEMMODE_64BIT:
18114 {
18115 IEM_MC_BEGIN(3, 2);
18116 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18117 IEM_MC_ARG(uint64_t, u64Src, 1);
18118 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18120
18121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18122 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
18123 IEM_MC_ASSIGN(u64Src, u64Imm);
18124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18125 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18126 IEM_MC_FETCH_EFLAGS(EFlags);
18127 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
18128
18129 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
18130 IEM_MC_COMMIT_EFLAGS(EFlags);
18131 IEM_MC_ADVANCE_RIP();
18132 IEM_MC_END();
18133 return VINF_SUCCESS;
18134 }
18135
18136 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18137 }
18138 }
18139}
18140
18141
18142/** Opcode 0xf6 /4, /5, /6 and /7. */
18143FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
18144{
18145 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18146 {
18147 /* register access */
18148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18149 IEM_MC_BEGIN(3, 1);
18150 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18151 IEM_MC_ARG(uint8_t, u8Value, 1);
18152 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18153 IEM_MC_LOCAL(int32_t, rc);
18154
18155 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18156 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18157 IEM_MC_REF_EFLAGS(pEFlags);
18158 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
18159 IEM_MC_IF_LOCAL_IS_Z(rc) {
18160 IEM_MC_ADVANCE_RIP();
18161 } IEM_MC_ELSE() {
18162 IEM_MC_RAISE_DIVIDE_ERROR();
18163 } IEM_MC_ENDIF();
18164
18165 IEM_MC_END();
18166 }
18167 else
18168 {
18169 /* memory access. */
18170 IEM_MC_BEGIN(3, 2);
18171 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18172 IEM_MC_ARG(uint8_t, u8Value, 1);
18173 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18175 IEM_MC_LOCAL(int32_t, rc);
18176
18177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18179 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18180 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18181 IEM_MC_REF_EFLAGS(pEFlags);
18182 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
18183 IEM_MC_IF_LOCAL_IS_Z(rc) {
18184 IEM_MC_ADVANCE_RIP();
18185 } IEM_MC_ELSE() {
18186 IEM_MC_RAISE_DIVIDE_ERROR();
18187 } IEM_MC_ENDIF();
18188
18189 IEM_MC_END();
18190 }
18191 return VINF_SUCCESS;
18192}
18193
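/* The mul/div assembly helpers above return 0 on success and non-zero to
   request #DE; only the DIV/IDIV helpers are expected to fail, MUL/IMUL
   presumably always succeed. */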
18194
18195/** Opcode 0xf7 /4, /5, /6 and /7. */
18196FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
18197{
18198 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18199
18200 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18201 {
18202 /* register access */
18203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18204 switch (pVCpu->iem.s.enmEffOpSize)
18205 {
18206 case IEMMODE_16BIT:
18207 {
18209 IEM_MC_BEGIN(4, 1);
18210 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18211 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18212 IEM_MC_ARG(uint16_t, u16Value, 2);
18213 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18214 IEM_MC_LOCAL(int32_t, rc);
18215
18216 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18217 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18218 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18219 IEM_MC_REF_EFLAGS(pEFlags);
18220 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18221 IEM_MC_IF_LOCAL_IS_Z(rc) {
18222 IEM_MC_ADVANCE_RIP();
18223 } IEM_MC_ELSE() {
18224 IEM_MC_RAISE_DIVIDE_ERROR();
18225 } IEM_MC_ENDIF();
18226
18227 IEM_MC_END();
18228 return VINF_SUCCESS;
18229 }
18230
18231 case IEMMODE_32BIT:
18232 {
18234 IEM_MC_BEGIN(4, 1);
18235 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18236 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18237 IEM_MC_ARG(uint32_t, u32Value, 2);
18238 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18239 IEM_MC_LOCAL(int32_t, rc);
18240
18241 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18242 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18243 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18244 IEM_MC_REF_EFLAGS(pEFlags);
18245 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18246 IEM_MC_IF_LOCAL_IS_Z(rc) {
18247 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18248 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18249 IEM_MC_ADVANCE_RIP();
18250 } IEM_MC_ELSE() {
18251 IEM_MC_RAISE_DIVIDE_ERROR();
18252 } IEM_MC_ENDIF();
18253
18254 IEM_MC_END();
18255 return VINF_SUCCESS;
18256 }
18257
18258 case IEMMODE_64BIT:
18259 {
18261 IEM_MC_BEGIN(4, 1);
18262 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18263 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18264 IEM_MC_ARG(uint64_t, u64Value, 2);
18265 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18266 IEM_MC_LOCAL(int32_t, rc);
18267
18268 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18269 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18270 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18271 IEM_MC_REF_EFLAGS(pEFlags);
18272 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18273 IEM_MC_IF_LOCAL_IS_Z(rc) {
18274 IEM_MC_ADVANCE_RIP();
18275 } IEM_MC_ELSE() {
18276 IEM_MC_RAISE_DIVIDE_ERROR();
18277 } IEM_MC_ENDIF();
18278
18279 IEM_MC_END();
18280 return VINF_SUCCESS;
18281 }
18282
18283 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18284 }
18285 }
18286 else
18287 {
18288 /* memory access. */
18289 switch (pVCpu->iem.s.enmEffOpSize)
18290 {
18291 case IEMMODE_16BIT:
18292 {
18293 IEM_MC_BEGIN(4, 2);
18294 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18295 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18296 IEM_MC_ARG(uint16_t, u16Value, 2);
18297 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18298 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18299 IEM_MC_LOCAL(int32_t, rc);
18300
18301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18303 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18304 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18305 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18306 IEM_MC_REF_EFLAGS(pEFlags);
18307 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18308 IEM_MC_IF_LOCAL_IS_Z(rc) {
18309 IEM_MC_ADVANCE_RIP();
18310 } IEM_MC_ELSE() {
18311 IEM_MC_RAISE_DIVIDE_ERROR();
18312 } IEM_MC_ENDIF();
18313
18314 IEM_MC_END();
18315 return VINF_SUCCESS;
18316 }
18317
18318 case IEMMODE_32BIT:
18319 {
18320 IEM_MC_BEGIN(4, 2);
18321 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18322 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18323 IEM_MC_ARG(uint32_t, u32Value, 2);
18324 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18326 IEM_MC_LOCAL(int32_t, rc);
18327
18328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18330 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18331 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18332 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18333 IEM_MC_REF_EFLAGS(pEFlags);
18334 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18335 IEM_MC_IF_LOCAL_IS_Z(rc) {
18336 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18337 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18338 IEM_MC_ADVANCE_RIP();
18339 } IEM_MC_ELSE() {
18340 IEM_MC_RAISE_DIVIDE_ERROR();
18341 } IEM_MC_ENDIF();
18342
18343 IEM_MC_END();
18344 return VINF_SUCCESS;
18345 }
18346
18347 case IEMMODE_64BIT:
18348 {
18349 IEM_MC_BEGIN(4, 2);
18350 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18351 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18352 IEM_MC_ARG(uint64_t, u64Value, 2);
18353 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18354 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18355 IEM_MC_LOCAL(int32_t, rc);
18356
18357 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18359 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18360 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18361 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18362 IEM_MC_REF_EFLAGS(pEFlags);
18363 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18364 IEM_MC_IF_LOCAL_IS_Z(rc) {
18365 IEM_MC_ADVANCE_RIP();
18366 } IEM_MC_ELSE() {
18367 IEM_MC_RAISE_DIVIDE_ERROR();
18368 } IEM_MC_ENDIF();
18369
18370 IEM_MC_END();
18371 return VINF_SUCCESS;
18372 }
18373
18374 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18375 }
18376 }
18377}
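/* Note the explicit IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF in the 32-bit cases
   above: writing thru a 32-bit register reference does not implicitly zero
   the upper half, so RAX/RDX are cleared by hand on success. */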
18378
18379/** Opcode 0xf6. */
18380FNIEMOP_DEF(iemOp_Grp3_Eb)
18381{
18382 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18383 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18384 {
18385 case 0:
18386 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
18387 case 1:
18388/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
18389 return IEMOP_RAISE_INVALID_OPCODE();
18390 case 2:
18391 IEMOP_MNEMONIC(not_Eb, "not Eb");
18392 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
18393 case 3:
18394 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
18395 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
18396 case 4:
18397 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
18398 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18399 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
18400 case 5:
18401 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
18402 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18403 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
18404 case 6:
18405 IEMOP_MNEMONIC(div_Eb, "div Eb");
18406 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18407 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
18408 case 7:
18409 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
18410 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18411 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
18412 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18413 }
18414}
18415
18416
18417/** Opcode 0xf7. */
18418FNIEMOP_DEF(iemOp_Grp3_Ev)
18419{
18420 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18421 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18422 {
18423 case 0:
18424 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
18425 case 1:
18426/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
18427 return IEMOP_RAISE_INVALID_OPCODE();
18428 case 2:
18429 IEMOP_MNEMONIC(not_Ev, "not Ev");
18430 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
18431 case 3:
18432 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
18433 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
18434 case 4:
18435 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
18436 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18437 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
18438 case 5:
18439 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
18440 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18441 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
18442 case 6:
18443 IEMOP_MNEMONIC(div_Ev, "div Ev");
18444 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18445 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
18446 case 7:
18447 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
18448 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18449 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
18450 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18451 }
18452}
18453
18454
18455/** Opcode 0xf8. */
18456FNIEMOP_DEF(iemOp_clc)
18457{
18458 IEMOP_MNEMONIC(clc, "clc");
18459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18460 IEM_MC_BEGIN(0, 0);
18461 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
18462 IEM_MC_ADVANCE_RIP();
18463 IEM_MC_END();
18464 return VINF_SUCCESS;
18465}
18466
18467
18468/** Opcode 0xf9. */
18469FNIEMOP_DEF(iemOp_stc)
18470{
18471 IEMOP_MNEMONIC(stc, "stc");
18472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18473 IEM_MC_BEGIN(0, 0);
18474 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
18475 IEM_MC_ADVANCE_RIP();
18476 IEM_MC_END();
18477 return VINF_SUCCESS;
18478}
18479
18480
18481/** Opcode 0xfa. */
18482FNIEMOP_DEF(iemOp_cli)
18483{
18484 IEMOP_MNEMONIC(cli, "cli");
18485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18486 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
18487}
18488
18489
/** Opcode 0xfb. */
FNIEMOP_DEF(iemOp_sti)
18491{
18492 IEMOP_MNEMONIC(sti, "sti");
18493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18494 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
18495}
18496
18497
18498/** Opcode 0xfc. */
18499FNIEMOP_DEF(iemOp_cld)
18500{
18501 IEMOP_MNEMONIC(cld, "cld");
18502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18503 IEM_MC_BEGIN(0, 0);
18504 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
18505 IEM_MC_ADVANCE_RIP();
18506 IEM_MC_END();
18507 return VINF_SUCCESS;
18508}
18509
18510
18511/** Opcode 0xfd. */
18512FNIEMOP_DEF(iemOp_std)
18513{
18514 IEMOP_MNEMONIC(std, "std");
18515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18516 IEM_MC_BEGIN(0, 0);
18517 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
18518 IEM_MC_ADVANCE_RIP();
18519 IEM_MC_END();
18520 return VINF_SUCCESS;
18521}
18522
18523
18524/** Opcode 0xfe. */
18525FNIEMOP_DEF(iemOp_Grp4)
18526{
18527 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18528 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18529 {
18530 case 0:
18531 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
18532 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
18533 case 1:
18534 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
18535 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
18536 default:
18537 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
18538 return IEMOP_RAISE_INVALID_OPCODE();
18539 }
18540}
18541
18542
18543/**
18544 * Opcode 0xff /2.
18545 * @param bRm The RM byte.
18546 */
18547FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
18548{
18549 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
18550 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18551
18552 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18553 {
18554 /* The new RIP is taken from a register. */
18555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18556 switch (pVCpu->iem.s.enmEffOpSize)
18557 {
18558 case IEMMODE_16BIT:
18559 IEM_MC_BEGIN(1, 0);
18560 IEM_MC_ARG(uint16_t, u16Target, 0);
18561 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18562 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END();
18564 return VINF_SUCCESS;
18565
18566 case IEMMODE_32BIT:
18567 IEM_MC_BEGIN(1, 0);
18568 IEM_MC_ARG(uint32_t, u32Target, 0);
18569 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18570 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END();
18572 return VINF_SUCCESS;
18573
18574 case IEMMODE_64BIT:
18575 IEM_MC_BEGIN(1, 0);
18576 IEM_MC_ARG(uint64_t, u64Target, 0);
18577 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18578 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END();
18580 return VINF_SUCCESS;
18581
18582 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18583 }
18584 }
18585 else
18586 {
        /* The new RIP is taken from memory. */
18588 switch (pVCpu->iem.s.enmEffOpSize)
18589 {
18590 case IEMMODE_16BIT:
18591 IEM_MC_BEGIN(1, 1);
18592 IEM_MC_ARG(uint16_t, u16Target, 0);
18593 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18596 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18597 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
18598 IEM_MC_END()
18599 return VINF_SUCCESS;
18600
18601 case IEMMODE_32BIT:
18602 IEM_MC_BEGIN(1, 1);
18603 IEM_MC_ARG(uint32_t, u32Target, 0);
18604 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18607 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18608 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
18609 IEM_MC_END()
18610 return VINF_SUCCESS;
18611
18612 case IEMMODE_64BIT:
18613 IEM_MC_BEGIN(1, 1);
18614 IEM_MC_ARG(uint64_t, u64Target, 0);
18615 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18618 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18619 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
18620 IEM_MC_END()
18621 return VINF_SUCCESS;
18622
18623 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18624 }
18625 }
18626}

typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
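
/*
 * Both iemCImpl_callf (opcode 0xff /3) and iemCImpl_FarJmp (opcode 0xff /5)
 * match this signature, so the worker below can decode the far pointer
 * operand once and simply be handed the C implementation to invoke.
 */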

FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
{
    /* Registers? How?? */
    if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
    { /* likely */ }
    else
        return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */

    /* Far pointer loaded from memory. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
             *        and will apparently ignore REX.W, at least for the jmp far qword [rsp]
             *        and call far qword [rsp] encodings. */
            if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Sel, 0);
                IEM_MC_ARG(uint64_t, offSeg, 1);
                IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
                IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
                IEM_MC_END();
                return VINF_SUCCESS;
            }
            /* AMD falls thru. */
            /* fall thru */

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
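
/*
 * Sketch of the far-pointer memory layouts the worker above assumes: the
 * offset comes first and the selector last, matching the
 * IEM_MC_FETCH_MEM_U16_DISP displacements of 2, 4 and 8:
 *      m16:16  [GCPtrEffSrc+0] = offSeg (16-bit)   [GCPtrEffSrc+2] = u16Sel
 *      m16:32  [GCPtrEffSrc+0] = offSeg (32-bit)   [GCPtrEffSrc+4] = u16Sel
 *      m16:64  [GCPtrEffSrc+0] = offSeg (64-bit)   [GCPtrEffSrc+8] = u16Sel
 */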


/**
 * Opcode 0xff /3.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}


/**
 * Opcode 0xff /4.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/**
 * Opcode 0xff /5.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}


/**
 * Opcode 0xff /6.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory operands are handled here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0xff. */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}



const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
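
/*
 * Dispatch sketch (an assumption for illustration -- the actual decoder loop
 * lives elsewhere in IEM): after fetching an opcode byte the decoder would
 * branch roughly like this, with 0x0f (iemOp_2byteEscape) chaining on to the
 * two-byte opcode map:
 *      uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
 *      return FNIEMOP_CALL(g_apfnOneByteMap[b]);
 */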


/** @} */

#ifdef _MSC_VER
# pragma warning(pop)
#endif