source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 65757

Last change on this file since 65757 was 65757, checked in by vboxsync, 8 years ago: IEM: 0x0f 0xe7 split up.

/* $Id: IEMAllInstructions.cpp.h 65757 2017-02-13 09:27:54Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */

#ifdef _MSC_VER
# pragma warning(push)
# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
#endif


/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

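    /* ModR/M reminder: mod (bits 7:6) == 3 selects a register operand; any
       other mod value denotes a memory operand whose effective address must
       be calculated from the rm field, SIB byte and displacement. */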
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        if (!pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
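/*
 * Usage sketch (illustrative only; the one-byte opcode handlers live outside
 * this excerpt): an opcode function binds a worker to a concrete instruction
 * by passing its IEMOPBINSIZES implementation table.  ADD Eb,Gb (opcode 0x00)
 * would look roughly like this:
 *
 *     FNIEMOP_DEF(iemOp_add_Eb_Gb)
 *     {
 *         IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
 *         return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
 *     }
 */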


/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
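        /* The destination is a register here, so the memory operand is only
           read: a plain fetch suffices and no map/commit or LOCK handling is
           needed (a LOCK prefix with a register destination raises \#UD). */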
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for instructions like ADD, AND, OR, ++ working on AL with a
 * byte immediate.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *, pu8Dst, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
    IEM_MC_ARG(uint32_t *, pEFlags, 2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * Common worker for instructions like ADD, AND, OR, ++ working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param pImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
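            /* In 64-bit mode Iz is a dword immediate sign-extended to 64
               bits, hence the S32_SX_U64 fetch below. */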
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
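/*
 * Usage sketch (illustrative only): the AL,Ib / rAX,Iz opcode bytes bind
 * these immediate workers the same way as the ModR/M forms, e.g. ADD AL,Ib
 * (opcode 0x04) would be roughly:
 *
 *     FNIEMOP_DEF(iemOp_add_Al_Ib)
 *     {
 *         IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
 *         return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
 *     }
 */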


/** Opcodes 0xf1, 0xd6. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC(Invalid, "Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid with RM byte. */
FNIEMOPRM_DEF(iemOp_InvalidWithRM)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a Mod R/M sequence to be decoded
 *  before raising \#UD. */
FNIEMOP_DEF(iemOp_InvalidNeedRM)
{
    IEMOP_MNEMONIC(InvalidNeedRM, "InvalidNeedRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a Mod R/M sequence and an 8-bit
 *  immediate to be decoded before raising \#UD. */
FNIEMOP_DEF(iemOp_InvalidNeedRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeedRMImm8, "InvalidNeedRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 1, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a 3rd escape byte and a Mod R/M
 *  sequence to be decoded before raising \#UD. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRM)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRM, "InvalidNeed3ByteEscRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Invalid opcode where Intel requires a 3rd escape byte, a Mod R/M sequence,
 *  and an 8-bit immediate to be decoded before raising \#UD. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRMImm8, "InvalidNeed3ByteEscRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 1, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}



/** @name ..... opcodes.
 *
 * @{
 */

/** @} */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
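        /* Memory form: only the 16-bit selector value is stored, whatever
           the operand size. */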
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for Grp6 VERR and VERW (0x0f 0x00 /4 and /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
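    /* Dispatch on the ModR/M reg field (bits 5:3); the workers decode mod/rm. */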
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
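                /* Emulate CPU generation quirks: the 286 returns the MSW with
                   bits 4..15 set (OR 0xfff0), the 386 sets bits 5..15
                   (OR 0xffe0), and later CPUs return CR0 unmodified. */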
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
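                /* mod == 3: the rm field selects among the VMX instructions
                   (encodings 0x0f 0x01 0xc1..0xc4). */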
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03) with Gv,Ew operands. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}


/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);


/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);

/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);

/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
{
    IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT

/** Opcode 0x66 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT

/** Opcode 0xf3 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT

/** Opcode 0xf2 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT

/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);

/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */

1821
1822/** Opcode 0x0f 0x18. */
1823FNIEMOP_DEF(iemOp_prefetch_Grp16)
1824{
1825 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1826 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1827 {
1828 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1829 {
1830 case 4: /* Aliased to /0 for the time being according to AMD. */
1831 case 5: /* Aliased to /0 for the time being according to AMD. */
1832 case 6: /* Aliased to /0 for the time being according to AMD. */
1833 case 7: /* Aliased to /0 for the time being according to AMD. */
1834 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1835 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1836 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1837 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1838 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1839 }
1840
1841 IEM_MC_BEGIN(0, 1);
1842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1845 /* Currently a NOP. */
1846 NOREF(GCPtrEffSrc);
1847 IEM_MC_ADVANCE_RIP();
1848 IEM_MC_END();
1849 return VINF_SUCCESS;
1850 }
1851
1852 return IEMOP_RAISE_INVALID_OPCODE();
1853}
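
/*
 * Illustrative sketch of the hint decoding above (not used by the decoder;
 * the name is made up): the ModRM reg field selects the prefetch hint, with
 * /4../7 currently aliased to /0.
 */
#if 0 /* illustration only */
static const char *iemSketchPrefetchHintName(uint8_t bRm)
{
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 1:  return "prefetcht0";
        case 2:  return "prefetcht1";
        case 3:  return "prefetcht2";
        default: return "prefetchnta"; /* /0 and the /4../7 aliases */
    }
}
#endif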
1854
1855
1856/** Opcode 0x0f 0x19..0x1f. */
1857FNIEMOP_DEF(iemOp_nop_Ev)
1858{
1859 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1860 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1861 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1862 {
1863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1864 IEM_MC_BEGIN(0, 0);
1865 IEM_MC_ADVANCE_RIP();
1866 IEM_MC_END();
1867 }
1868 else
1869 {
1870 IEM_MC_BEGIN(0, 1);
1871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1874 /* Currently a NOP. */
1875 NOREF(GCPtrEffSrc);
1876 IEM_MC_ADVANCE_RIP();
1877 IEM_MC_END();
1878 }
1879 return VINF_SUCCESS;
1880}
1881
1882
1883/** Opcode 0x0f 0x20. */
1884FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1885{
1886 /* mod is ignored, as are operand size overrides. */
1887 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1888 IEMOP_HLP_MIN_386();
1889 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1890 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1891 else
1892 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1893
1894 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1895 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1896 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1897 {
1898 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1899 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1900 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1901 iCrReg |= 8;
1902 }
1903 switch (iCrReg)
1904 {
1905 case 0: case 2: case 3: case 4: case 8:
1906 break;
1907 default:
1908 return IEMOP_RAISE_INVALID_OPCODE();
1909 }
1910 IEMOP_HLP_DONE_DECODING();
1911
1912 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1913}
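
/*
 * Informal example of the LOCK/CR8 encoding handled above: on CPUs with the
 * fMovCr8In32Bit feature (AMD's alternative CR8 encoding), "lock mov eax, cr0"
 * (F0 0F 20 C0) decodes as "mov eax, cr8", giving 32-bit code access to the
 * task priority register.
 */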
1914
1915
1916/** Opcode 0x0f 0x21. */
1917FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1918{
1919 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1920 IEMOP_HLP_MIN_386();
1921 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1923 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1924 return IEMOP_RAISE_INVALID_OPCODE();
1925 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1926 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1927 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1928}
1929
1930
1931/** Opcode 0x0f 0x22. */
1932FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1933{
1934 /* mod is ignored, as are operand size overrides. */
1935 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1936 IEMOP_HLP_MIN_386();
1937 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1938 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1939 else
1940 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1941
1942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1943 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1944 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1945 {
1946 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1947 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1948 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1949 iCrReg |= 8;
1950 }
1951 switch (iCrReg)
1952 {
1953 case 0: case 2: case 3: case 4: case 8:
1954 break;
1955 default:
1956 return IEMOP_RAISE_INVALID_OPCODE();
1957 }
1958 IEMOP_HLP_DONE_DECODING();
1959
1960 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1961}
1962
1963
1964/** Opcode 0x0f 0x23. */
1965FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1966{
1967 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1968 IEMOP_HLP_MIN_386();
1969 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1971 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1972 return IEMOP_RAISE_INVALID_OPCODE();
1973 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1974 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1975 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1976}
1977
1978
1979/** Opcode 0x0f 0x24. */
1980FNIEMOP_DEF(iemOp_mov_Rd_Td)
1981{
1982 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1983 /** @todo works on 386 and 486. */
1984 /* The RM byte is not considered, see testcase. */
1985 return IEMOP_RAISE_INVALID_OPCODE();
1986}
1987
1988
1989/** Opcode 0x0f 0x26. */
1990FNIEMOP_DEF(iemOp_mov_Td_Rd)
1991{
1992 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1993 /** @todo works on 386 and 486. */
1994 /* The RM byte is not considered, see testcase. */
1995 return IEMOP_RAISE_INVALID_OPCODE();
1996}
1997
1998
1999/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
2000FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
2001{
2002 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
2003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2004 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2005 {
2006 /*
2007 * Register, register.
2008 */
2009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2010 IEM_MC_BEGIN(0, 0);
2011 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2012 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2013 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2014 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2015 IEM_MC_ADVANCE_RIP();
2016 IEM_MC_END();
2017 }
2018 else
2019 {
2020 /*
2021 * Register, memory.
2022 */
2023 IEM_MC_BEGIN(0, 2);
2024 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2026
2027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2029 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2030 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2031
2032 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2033 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2034
2035 IEM_MC_ADVANCE_RIP();
2036 IEM_MC_END();
2037 }
2038 return VINF_SUCCESS;
2039}
2040
2041/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
2042FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
2043{
2044 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
2045 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2046 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2047 {
2048 /*
2049 * Register, register.
2050 */
2051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2052 IEM_MC_BEGIN(0, 0);
2053 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2054 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2055 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2056 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2057 IEM_MC_ADVANCE_RIP();
2058 IEM_MC_END();
2059 }
2060 else
2061 {
2062 /*
2063 * Register, memory.
2064 */
2065 IEM_MC_BEGIN(0, 2);
2066 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2068
2069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2071 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2072 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2073
2074 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2075 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2076
2077 IEM_MC_ADVANCE_RIP();
2078 IEM_MC_END();
2079 }
2080 return VINF_SUCCESS;
2081}
2082
2083/* Opcode 0xf3 0x0f 0x28 - invalid */
2084/* Opcode 0xf2 0x0f 0x28 - invalid */
2085
2086/** Opcode 0x0f 0x29. */
2087FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
2088{
2089 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2090 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
2091 else
2092 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
2093 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2094 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2095 {
2096 /*
2097 * Register, register.
2098 */
2099 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
2100 IEM_MC_BEGIN(0, 0);
2101 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2102 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2103 else
2104 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2105 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2106 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2107 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2108 IEM_MC_ADVANCE_RIP();
2109 IEM_MC_END();
2110 }
2111 else
2112 {
2113 /*
2114 * Memory, register.
2115 */
2116 IEM_MC_BEGIN(0, 2);
2117 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2119
2120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2121 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2122 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2123 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2124 else
2125 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2126 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2127
2128 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2129 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2130
2131 IEM_MC_ADVANCE_RIP();
2132 IEM_MC_END();
2133 }
2134 return VINF_SUCCESS;
2135}
2136
2137
2138/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2139FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2140/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2141FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2142/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
2143FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
2144/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2145FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
2146
2147
2148/** Opcode 0x0f 0x2b. */
2149FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
2150{
2151 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2152 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
2153 else
2154 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
2155 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2156 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2157 {
2158 /*
2159 * memory, register.
2160 */
2161 IEM_MC_BEGIN(0, 2);
2162 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
2163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2164
2165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2166 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2167 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2168 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2169 else
2170 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2171 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2172
2173 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2174 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2175
2176 IEM_MC_ADVANCE_RIP();
2177 IEM_MC_END();
2178 }
2179 /* The register, register encoding is invalid. */
2180 else
2181 return IEMOP_RAISE_INVALID_OPCODE();
2182 return VINF_SUCCESS;
2183}
2184
2185
2186/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2187FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2188/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2189FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2190/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
2191FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2192/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
2193FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2194
2195/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2196FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2197/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2198FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2199/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
2200FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2201/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
2202FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2203
2204/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
2205FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
2206/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
2207FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
2208/* Opcode 0xf3 0x0f 0x2e - invalid */
2209/* Opcode 0xf2 0x0f 0x2e - invalid */
2210
2211/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
2212FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
2213/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
2214FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
2215/* Opcode 0xf3 0x0f 0x2f - invalid */
2216/* Opcode 0xf2 0x0f 0x2f - invalid */
2217
2218/** Opcode 0x0f 0x30. */
2219FNIEMOP_DEF(iemOp_wrmsr)
2220{
2221 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2223 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2224}
2225
2226
2227/** Opcode 0x0f 0x31. */
2228FNIEMOP_DEF(iemOp_rdtsc)
2229{
2230 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2232 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2233}
2234
2235
2236/** Opcode 0x0f 0x32. */
2237FNIEMOP_DEF(iemOp_rdmsr)
2238{
2239 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2241 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2242}
2243
2244
2245/** Opcode 0x0f 0x33. */
2246FNIEMOP_STUB(iemOp_rdpmc);
2247/** Opcode 0x0f 0x34. */
2248FNIEMOP_STUB(iemOp_sysenter);
2249/** Opcode 0x0f 0x35. */
2250FNIEMOP_STUB(iemOp_sysexit);
2251/** Opcode 0x0f 0x37. */
2252FNIEMOP_STUB(iemOp_getsec);
2253/** Opcode 0x0f 0x38. */
2254FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2255/** Opcode 0x0f 0x3a. */
2256FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2257
2258
2259/**
2260 * Implements a conditional move.
2261 *
2262 * Wish there was an obvious way to do this where we could share and reduce
2263 * code bloat; a plain C sketch of the 32-bit case follows the macro.
2264 *
2265 * @param a_Cnd The conditional "microcode" operation.
2266 */
2267#define CMOV_X(a_Cnd) \
2268 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2269 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2270 { \
2271 switch (pVCpu->iem.s.enmEffOpSize) \
2272 { \
2273 case IEMMODE_16BIT: \
2274 IEM_MC_BEGIN(0, 1); \
2275 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2276 a_Cnd { \
2277 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2278 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2279 } IEM_MC_ENDIF(); \
2280 IEM_MC_ADVANCE_RIP(); \
2281 IEM_MC_END(); \
2282 return VINF_SUCCESS; \
2283 \
2284 case IEMMODE_32BIT: \
2285 IEM_MC_BEGIN(0, 1); \
2286 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2287 a_Cnd { \
2288 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2289 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2290 } IEM_MC_ELSE() { \
2291 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2292 } IEM_MC_ENDIF(); \
2293 IEM_MC_ADVANCE_RIP(); \
2294 IEM_MC_END(); \
2295 return VINF_SUCCESS; \
2296 \
2297 case IEMMODE_64BIT: \
2298 IEM_MC_BEGIN(0, 1); \
2299 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2300 a_Cnd { \
2301 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2302 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2303 } IEM_MC_ENDIF(); \
2304 IEM_MC_ADVANCE_RIP(); \
2305 IEM_MC_END(); \
2306 return VINF_SUCCESS; \
2307 \
2308 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2309 } \
2310 } \
2311 else \
2312 { \
2313 switch (pVCpu->iem.s.enmEffOpSize) \
2314 { \
2315 case IEMMODE_16BIT: \
2316 IEM_MC_BEGIN(0, 2); \
2317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2318 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2320 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2321 a_Cnd { \
2322 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2323 } IEM_MC_ENDIF(); \
2324 IEM_MC_ADVANCE_RIP(); \
2325 IEM_MC_END(); \
2326 return VINF_SUCCESS; \
2327 \
2328 case IEMMODE_32BIT: \
2329 IEM_MC_BEGIN(0, 2); \
2330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2331 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2333 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2334 a_Cnd { \
2335 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2336 } IEM_MC_ELSE() { \
2337 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2338 } IEM_MC_ENDIF(); \
2339 IEM_MC_ADVANCE_RIP(); \
2340 IEM_MC_END(); \
2341 return VINF_SUCCESS; \
2342 \
2343 case IEMMODE_64BIT: \
2344 IEM_MC_BEGIN(0, 2); \
2345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2346 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2348 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2349 a_Cnd { \
2350 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2351 } IEM_MC_ENDIF(); \
2352 IEM_MC_ADVANCE_RIP(); \
2353 IEM_MC_END(); \
2354 return VINF_SUCCESS; \
2355 \
2356 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2357 } \
2358 } do {} while (0)
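
/*
 * Illustrative sketch of what CMOV_X boils down to for a 32-bit operand (not
 * used anywhere; the name is made up). It shows why the IEM_MC_ELSE() branches
 * above exist: in 64-bit mode a 32-bit CMOV writes its destination even when
 * the condition is false, zero-extending into bits 63:32.
 */
#if 0 /* illustration only */
static uint64_t iemSketchCmov32(uint64_t uDst, uint32_t uSrc, bool fCondition)
{
    uint32_t const uResult = fCondition ? uSrc : (uint32_t)uDst;
    return uResult; /* zero-extended to 64 bits in both cases */
}
#endif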
2359
2360
2361
2362/** Opcode 0x0f 0x40. */
2363FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2364{
2365 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2366 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2367}
2368
2369
2370/** Opcode 0x0f 0x41. */
2371FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2372{
2373 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2374 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2375}
2376
2377
2378/** Opcode 0x0f 0x42. */
2379FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2380{
2381 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2382 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2383}
2384
2385
2386/** Opcode 0x0f 0x43. */
2387FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2388{
2389 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2390 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2391}
2392
2393
2394/** Opcode 0x0f 0x44. */
2395FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2396{
2397 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2398 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2399}
2400
2401
2402/** Opcode 0x0f 0x45. */
2403FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2404{
2405 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2406 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2407}
2408
2409
2410/** Opcode 0x0f 0x46. */
2411FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2412{
2413 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2414 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2415}
2416
2417
2418/** Opcode 0x0f 0x47. */
2419FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2420{
2421 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2422 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2423}
2424
2425
2426/** Opcode 0x0f 0x48. */
2427FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2428{
2429 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2430 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2431}
2432
2433
2434/** Opcode 0x0f 0x49. */
2435FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2436{
2437 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2438 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2439}
2440
2441
2442/** Opcode 0x0f 0x4a. */
2443FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2444{
2445 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2446 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2447}
2448
2449
2450/** Opcode 0x0f 0x4b. */
2451FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2452{
2453 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2454 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2455}
2456
2457
2458/** Opcode 0x0f 0x4c. */
2459FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2460{
2461 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2462 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2463}
2464
2465
2466/** Opcode 0x0f 0x4d. */
2467FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2468{
2469 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2470 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2471}
2472
2473
2474/** Opcode 0x0f 0x4e. */
2475FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2476{
2477 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2478 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2479}
2480
2481
2482/** Opcode 0x0f 0x4f. */
2483FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2484{
2485 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2486 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2487}
2488
2489#undef CMOV_X
2490
2491/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
2492FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
2493/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
2494FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
2495/* Opcode 0xf3 0x0f 0x50 - invalid */
2496/* Opcode 0xf2 0x0f 0x50 - invalid */
2497
2498/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
2499FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2500/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
2501FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2502/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
2503FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2504/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2505FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2506
2507/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
2508FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2509/* Opcode 0x66 0x0f 0x52 - invalid */
2510/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
2511FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2512/* Opcode 0xf2 0x0f 0x52 - invalid */
2513
2514/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
2515FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2516/* Opcode 0x66 0x0f 0x53 - invalid */
2517/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
2518FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2519/* Opcode 0xf2 0x0f 0x53 - invalid */
2520
2521/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
2522FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
2523/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
2524FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
2525/* Opcode 0xf3 0x0f 0x54 - invalid */
2526/* Opcode 0xf2 0x0f 0x54 - invalid */
2527
2528/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
2529FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
2530/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
2531FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
2532/* Opcode 0xf3 0x0f 0x55 - invalid */
2533/* Opcode 0xf2 0x0f 0x55 - invalid */
2534
2535/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
2536FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
2537/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
2538FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
2539/* Opcode 0xf3 0x0f 0x56 - invalid */
2540/* Opcode 0xf2 0x0f 0x56 - invalid */
2541
2542/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
2543FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
2544/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
2545FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
2546/* Opcode 0xf3 0x0f 0x57 - invalid */
2547/* Opcode 0xf2 0x0f 0x57 - invalid */
2548
2549/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
2550FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2551/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
2552FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2553/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
2554FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2555/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
2556FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2557
2558/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
2559FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2560/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
2561FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2562/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
2563FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2564/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
2565FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2566
2567/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
2568FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2569/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
2570FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2571/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
2572FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2573/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2574FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2575
2576/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
2577FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2578/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
2579FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2580/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
2581FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2582/* Opcode 0xf2 0x0f 0x5b - invalid */
2583
2584/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
2585FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2586/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
2587FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2588/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
2589FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2590/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
2591FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2592
2593/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
2594FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2595/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
2596FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2597/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
2598FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2599/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
2600FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2601
2602/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
2603FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2604/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
2605FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2606/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
2607FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2608/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
2609FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2610
2611/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
2612FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2613/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
2614FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2615/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
2616FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2617/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
2618FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2619
2620/**
2621 * Common worker for SSE2 instructions on the forms:
2622 * pxxxx xmm1, xmm2/mem128
2623 *
2624 * The 2nd operand is the first half of a register, which in the memory
2625 * case means a 128-bit aligned, 64-bit memory access for SSE (only the
2626 * low quadword of the source operand is used).
2627 *
2628 * Exceptions type 4.
2629 */
2630FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2631{
2632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2633 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2634 {
2635 /*
2636 * Register, register.
2637 */
2638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2639 IEM_MC_BEGIN(2, 0);
2640 IEM_MC_ARG(uint128_t *, pDst, 0);
2641 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2642 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2643 IEM_MC_PREPARE_SSE_USAGE();
2644 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2645 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2646 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2647 IEM_MC_ADVANCE_RIP();
2648 IEM_MC_END();
2649 }
2650 else
2651 {
2652 /*
2653 * Register, memory.
2654 */
2655 IEM_MC_BEGIN(2, 2);
2656 IEM_MC_ARG(uint128_t *, pDst, 0);
2657 IEM_MC_LOCAL(uint64_t, uSrc);
2658 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2660
2661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2663 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2664 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2665
2666 IEM_MC_PREPARE_SSE_USAGE();
2667 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2668 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2669
2670 IEM_MC_ADVANCE_RIP();
2671 IEM_MC_END();
2672 }
2673 return VINF_SUCCESS;
2674}
2675
2676
2677/**
2678 * Common worker for MMX instructions on the forms:
2679 * pxxxx mm1, mm2/mem32
2680 *
2681 * The 2nd operand is the first half of a register, which in the memory
2682 * case means a 32-bit memory access for MMX (the low half of the source
2683 * register in the register case).
2684 *
2685 * Exceptions type 4.
2686 */
2687FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2688{
2689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2690 if (!pImpl->pfnU64)
2691 return IEMOP_RAISE_INVALID_OPCODE();
2692 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2693 {
2694 /*
2695 * Register, register.
2696 */
2697 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2698 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2700 IEM_MC_BEGIN(2, 0);
2701 IEM_MC_ARG(uint64_t *, pDst, 0);
2702 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2703 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2704 IEM_MC_PREPARE_FPU_USAGE();
2705 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2706 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2707 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2708 IEM_MC_ADVANCE_RIP();
2709 IEM_MC_END();
2710 }
2711 else
2712 {
2713 /*
2714 * Register, memory.
2715 */
2716 IEM_MC_BEGIN(2, 2);
2717 IEM_MC_ARG(uint64_t *, pDst, 0);
2718 IEM_MC_LOCAL(uint32_t, uSrc);
2719 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2720 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2721
2722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2724 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2725 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2726
2727 IEM_MC_PREPARE_FPU_USAGE();
2728 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2729 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2730
2731 IEM_MC_ADVANCE_RIP();
2732 IEM_MC_END();
2733 }
2734 return VINF_SUCCESS;
2735}
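
/*
 * Illustrative sketch of the "low halves to full register" semantics the two
 * workers above implement, using punpcklbw on MMX operands as the example
 * (not used anywhere; the name is made up). The SSE2 variant does the same
 * with the low quadwords of two XMM registers.
 */
#if 0 /* illustration only */
static uint64_t iemSketchPunpcklbw(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++) /* interleave the low four byte pairs */
    {
        uResult |= ((uDst >> (i * 8)) & 0xff) << (i * 16);
        uResult |= ((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8);
    }
    return uResult;
}
#endif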
2736
2737
2738/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2739FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2740{
2741 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2742 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2743}
2744
2745/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
2746FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2747{
2748 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2749 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2750}
2751
2752/* Opcode 0xf3 0x0f 0x60 - invalid */
2753
2754
2755/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2756FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2757{
2758 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
2759 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2760}
2761
2762/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2763FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2764{
2765 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2766 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2767}
2768
2769/* Opcode 0xf3 0x0f 0x61 - invalid */
2770
2771
2772/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2773FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2774{
2775 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2776 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2777}
2778
2779/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2780FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2781{
2782 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2783 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2784}
2785
2786/* Opcode 0xf3 0x0f 0x62 - invalid */
2787
2788
2789
2790/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
2791FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
2792/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
2793FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2794/* Opcode 0xf3 0x0f 0x63 - invalid */
2795
2796/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
2797FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
2798/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
2799FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2800/* Opcode 0xf3 0x0f 0x64 - invalid */
2801
2802/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
2803FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
2804/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
2805FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2806/* Opcode 0xf3 0x0f 0x65 - invalid */
2807
2808/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
2809FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
2810/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
2811FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2812/* Opcode 0xf3 0x0f 0x66 - invalid */
2813
2814/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
2815FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
2816/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
2817FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2818/* Opcode 0xf3 0x0f 0x67 - invalid */
2819
2820
2821/**
2822 * Common worker for MMX instructions on the forms:
2823 * pxxxx mm1, mm2/mem64
2824 *
2825 * The 2nd operand is the second half of a register, which in the memory
2826 * case means a 64-bit memory access for MMX.
2828 *
2829 * Exceptions type 4.
2830 */
2831FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2832{
2833 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2834 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
2835 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2836 {
2837 /*
2838 * Register, register.
2839 */
2840 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2841 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2843 IEM_MC_BEGIN(2, 0);
2844 IEM_MC_ARG(uint64_t *, pDst, 0);
2845 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2846 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2847 IEM_MC_PREPARE_FPU_USAGE();
2848 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2849 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2850 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2851 IEM_MC_ADVANCE_RIP();
2852 IEM_MC_END();
2853 }
2854 else
2855 {
2856 /*
2857 * Register, memory.
2858 */
2859 IEM_MC_BEGIN(2, 2);
2860 IEM_MC_ARG(uint64_t *, pDst, 0);
2861 IEM_MC_LOCAL(uint64_t, uSrc);
2862 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2864
2865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2867 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2868 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2869
2870 IEM_MC_PREPARE_FPU_USAGE();
2871 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2872 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2873
2874 IEM_MC_ADVANCE_RIP();
2875 IEM_MC_END();
2876 }
2877 return VINF_SUCCESS;
2878}
2879
2880
2881/**
2882 * Common worker for SSE2 instructions on the forms:
2883 * pxxxx xmm1, xmm2/mem128
2884 *
2885 * The 2nd operand is the second half of a register, which in the memory
2886 * case means a 128-bit aligned access for SSE, where the implementation
2887 * may read the full 128 bits or only the upper 64 bits.
2888 *
2889 * Exceptions type 4.
2890 */
2891FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2892{
2893 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2894 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2895 {
2896 /*
2897 * Register, register.
2898 */
2899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2900 IEM_MC_BEGIN(2, 0);
2901 IEM_MC_ARG(uint128_t *, pDst, 0);
2902 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2903 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2904 IEM_MC_PREPARE_SSE_USAGE();
2905 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2906 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2907 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2908 IEM_MC_ADVANCE_RIP();
2909 IEM_MC_END();
2910 }
2911 else
2912 {
2913 /*
2914 * Register, memory.
2915 */
2916 IEM_MC_BEGIN(2, 2);
2917 IEM_MC_ARG(uint128_t *, pDst, 0);
2918 IEM_MC_LOCAL(uint128_t, uSrc);
2919 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2920 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2921
2922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2924 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2925 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2926
2927 IEM_MC_PREPARE_SSE_USAGE();
2928 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2929 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2930
2931 IEM_MC_ADVANCE_RIP();
2932 IEM_MC_END();
2933 }
2934 return VINF_SUCCESS;
2935}
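
/*
 * Illustrative sketch of the "high halves to full register" semantics the two
 * workers above implement, using punpckhbw on MMX operands as the example
 * (not used anywhere; the name is made up).
 */
#if 0 /* illustration only */
static uint64_t iemSketchPunpckhbw(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++) /* interleave the high four byte pairs */
    {
        uResult |= ((uDst >> ((i + 4) * 8)) & 0xff) << (i * 16);
        uResult |= ((uSrc >> ((i + 4) * 8)) & 0xff) << (i * 16 + 8);
    }
    return uResult;
}
#endif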
2936
2937
2938/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
2939FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
2940{
2941 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
2942 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2943}
2944
2945/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
2946FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2947{
2948 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2949 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2950}
2951/* Opcode 0xf3 0x0f 0x68 - invalid */
2952
2953
2954/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
2955FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
2956{
2957 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
2958 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2959}
2960
2961/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
2962FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2963{
2964 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2965 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2966}
2968/* Opcode 0xf3 0x0f 0x69 - invalid */
2969
2970
2971/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
2972FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
2973{
2974 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
2975 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2976}
2977
2978/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
2979FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2980{
2981 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
2982 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2983}
2984/* Opcode 0xf3 0x0f 0x6a - invalid */
2985
2986
2987/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
2988FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
2989/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
2990FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2991/* Opcode 0xf3 0x0f 0x6b - invalid */
2992
2993
2994/* Opcode 0x0f 0x6c - invalid */
2995
2996/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
2997FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2998{
2999 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
3000 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3001}
3002
3003/* Opcode 0xf3 0x0f 0x6c - invalid */
3004/* Opcode 0xf2 0x0f 0x6c - invalid */
3005
3006
3007/* Opcode 0x0f 0x6d - invalid */
3008
3009/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
3010FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3011{
3012 IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, W");
3013 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3014}
3015
3016/* Opcode 0xf3 0x0f 0x6d - invalid */
3017
3018
3019/** Opcode 0x0f 0x6e - movd/q Pd, Ey */
3020FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3021{
3022 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3023 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3024 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
3025 else
3026 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
3027 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3028 {
3029 /* MMX, greg */
3030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3031 IEM_MC_BEGIN(0, 1);
3032 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3033 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3034 IEM_MC_LOCAL(uint64_t, u64Tmp);
3035 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3036 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3037 else
3038 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3039 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3040 IEM_MC_ADVANCE_RIP();
3041 IEM_MC_END();
3042 }
3043 else
3044 {
3045 /* MMX, [mem] */
3046 IEM_MC_BEGIN(0, 2);
3047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3048 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3051 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3052 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3053 {
3054 IEM_MC_LOCAL(uint64_t, u64Tmp);
3055 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3056 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3057 }
3058 else
3059 {
3060 IEM_MC_LOCAL(uint32_t, u32Tmp);
3061 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3062 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3063 }
3064 IEM_MC_ADVANCE_RIP();
3065 IEM_MC_END();
3066 }
3067 return VINF_SUCCESS;
3068}
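
/*
 * Informal note on the 32-bit path above: movd Pd,Ed zero-extends the 32-bit
 * source into the 64-bit MMX register. Conceptually (made-up name):
 */
#if 0 /* illustration only */
static uint64_t iemSketchMovdToMmx(uint32_t u32Src)
{
    return u32Src; /* implicit zero-extension; bits 63:32 end up zero */
}
#endif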
3069
3070/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
3071FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3072{
3073 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3074 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3075 IEMOP_MNEMONIC(vmovdq_Vq_Eq, "vmovq Vq,Eq");
3076 else
3077 IEMOP_MNEMONIC(vmovdq_Vd_Ed, "vmovd Vd,Ed");
3078 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3079 {
3080 /* XMM, greg */
3081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3082 IEM_MC_BEGIN(0, 1);
3083 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3084 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3085 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3086 {
3087 IEM_MC_LOCAL(uint64_t, u64Tmp);
3088 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3089 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3090 }
3091 else
3092 {
3093 IEM_MC_LOCAL(uint32_t, u32Tmp);
3094 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3095 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3096 }
3097 IEM_MC_ADVANCE_RIP();
3098 IEM_MC_END();
3099 }
3100 else
3101 {
3102 /* XMM, [mem] */
3103 IEM_MC_BEGIN(0, 2);
3104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3105 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
3106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3108 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3109 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3110 {
3111 IEM_MC_LOCAL(uint64_t, u64Tmp);
3112 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3113 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3114 }
3115 else
3116 {
3117 IEM_MC_LOCAL(uint32_t, u32Tmp);
3118 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3119 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3120 }
3121 IEM_MC_ADVANCE_RIP();
3122 IEM_MC_END();
3123 }
3124 return VINF_SUCCESS;
3125}
3126
3127/* Opcode 0xf3 0x0f 0x6e - invalid */
3128
3129
3130/** Opcode 0x0f 0x6f - movq Pq, Qq */
3131FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3132{
3133 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3134 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
3135 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3136 {
3137 /*
3138 * Register, register.
3139 */
3140 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3141 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3143 IEM_MC_BEGIN(0, 1);
3144 IEM_MC_LOCAL(uint64_t, u64Tmp);
3145 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3146 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3147 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3148 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3149 IEM_MC_ADVANCE_RIP();
3150 IEM_MC_END();
3151 }
3152 else
3153 {
3154 /*
3155 * Register, memory.
3156 */
3157 IEM_MC_BEGIN(0, 2);
3158 IEM_MC_LOCAL(uint64_t, u64Tmp);
3159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3160
3161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3163 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3164 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3165 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3166 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3167
3168 IEM_MC_ADVANCE_RIP();
3169 IEM_MC_END();
3170 }
3171 return VINF_SUCCESS;
3172}
3173
3174/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
3175FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3176{
3177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3178 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3179 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3180 {
3181 /*
3182 * Register, register.
3183 */
3184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3185 IEM_MC_BEGIN(0, 0);
3186 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3187 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3188 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3189 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3190 IEM_MC_ADVANCE_RIP();
3191 IEM_MC_END();
3192 }
3193 else
3194 {
3195 /*
3196 * Register, memory.
3197 */
3198 IEM_MC_BEGIN(0, 2);
3199 IEM_MC_LOCAL(uint128_t, u128Tmp);
3200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3201
3202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3204 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3205 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3206 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3207 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3208
3209 IEM_MC_ADVANCE_RIP();
3210 IEM_MC_END();
3211 }
3212 return VINF_SUCCESS;
3213}
3214
3215/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
3216FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3217{
3218 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3219 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3220 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3221 {
3222 /*
3223 * Register, register.
3224 */
3225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3226 IEM_MC_BEGIN(0, 0);
3227 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3228 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3229 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3230 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3231 IEM_MC_ADVANCE_RIP();
3232 IEM_MC_END();
3233 }
3234 else
3235 {
3236 /*
3237 * Register, memory.
3238 */
3239 IEM_MC_BEGIN(0, 2);
3240 IEM_MC_LOCAL(uint128_t, u128Tmp);
3241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3242
3243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3245 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3246 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3247 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3248 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3249
3250 IEM_MC_ADVANCE_RIP();
3251 IEM_MC_END();
3252 }
3253 return VINF_SUCCESS;
3254}
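
/*
 * Informal note on the two functions above: the register forms are identical;
 * the memory path of movdqa uses the alignment-checking 128-bit fetch while
 * movdqu uses the plain one. Conceptually (made-up name):
 */
#if 0 /* illustration only */
static bool iemSketchMovdqaAlignmentOk(RTGCPTR GCPtrMem)
{
    return !(GCPtrMem & 15); /* movdqa faults on unaligned 16-byte accesses */
}
#endif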
3255
3256
3257/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3258FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3259{
3260 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3262 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3263 {
3264 /*
3265 * Register, register.
3266 */
3267 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3269
3270 IEM_MC_BEGIN(3, 0);
3271 IEM_MC_ARG(uint64_t *, pDst, 0);
3272 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3273 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3274 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3275 IEM_MC_PREPARE_FPU_USAGE();
3276 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3277 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3278 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3279 IEM_MC_ADVANCE_RIP();
3280 IEM_MC_END();
3281 }
3282 else
3283 {
3284 /*
3285 * Register, memory.
3286 */
3287 IEM_MC_BEGIN(3, 2);
3288 IEM_MC_ARG(uint64_t *, pDst, 0);
3289 IEM_MC_LOCAL(uint64_t, uSrc);
3290 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3291 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3292
3293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3294 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3295 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3297 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3298
3299 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3300 IEM_MC_PREPARE_FPU_USAGE();
3301 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3302 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3303
3304 IEM_MC_ADVANCE_RIP();
3305 IEM_MC_END();
3306 }
3307 return VINF_SUCCESS;
3308}
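
/*
 * Illustrative sketch of the pshufw operation performed above (not used
 * anywhere; the name is made up): each 2-bit field of the immediate selects
 * which source word lands in the corresponding destination word.
 */
#if 0 /* illustration only */
static uint64_t iemSketchPshufw(uint64_t uSrc, uint8_t bImm)
{
    uint64_t uResult = 0;
    for (unsigned iWord = 0; iWord < 4; iWord++)
    {
        unsigned const iSel = (bImm >> (iWord * 2)) & 3;
        uResult |= ((uSrc >> (iSel * 16)) & 0xffff) << (iWord * 16);
    }
    return uResult;
}
#endif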
3309
3310/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
3311FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3312{
3313 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
3314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3316 {
3317 /*
3318 * Register, register.
3319 */
3320 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3322
3323 IEM_MC_BEGIN(3, 0);
3324 IEM_MC_ARG(uint128_t *, pDst, 0);
3325 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3326 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3327 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3328 IEM_MC_PREPARE_SSE_USAGE();
3329 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3330 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3331 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3332 IEM_MC_ADVANCE_RIP();
3333 IEM_MC_END();
3334 }
3335 else
3336 {
3337 /*
3338 * Register, memory.
3339 */
3340 IEM_MC_BEGIN(3, 2);
3341 IEM_MC_ARG(uint128_t *, pDst, 0);
3342 IEM_MC_LOCAL(uint128_t, uSrc);
3343 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3345
3346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3347 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3348 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3350 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3351
3352 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3353 IEM_MC_PREPARE_SSE_USAGE();
3354 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3355 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3356
3357 IEM_MC_ADVANCE_RIP();
3358 IEM_MC_END();
3359 }
3360 return VINF_SUCCESS;
3361}
3362
3363/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
3364FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3365{
3366 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
3367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3368 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3369 {
3370 /*
3371 * Register, register.
3372 */
3373 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3375
3376 IEM_MC_BEGIN(3, 0);
3377 IEM_MC_ARG(uint128_t *, pDst, 0);
3378 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3379 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3380 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3381 IEM_MC_PREPARE_SSE_USAGE();
3382 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3383 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3384 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3385 IEM_MC_ADVANCE_RIP();
3386 IEM_MC_END();
3387 }
3388 else
3389 {
3390 /*
3391 * Register, memory.
3392 */
3393 IEM_MC_BEGIN(3, 2);
3394 IEM_MC_ARG(uint128_t *, pDst, 0);
3395 IEM_MC_LOCAL(uint128_t, uSrc);
3396 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3398
3399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3400 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3401 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3403 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3404
3405 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3406 IEM_MC_PREPARE_SSE_USAGE();
3407 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3408 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3409
3410 IEM_MC_ADVANCE_RIP();
3411 IEM_MC_END();
3412 }
3413 return VINF_SUCCESS;
3414}
3415
3416/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
3417FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3418{
3419 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
3420 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3421 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3422 {
3423 /*
3424 * Register, register.
3425 */
3426 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3428
3429 IEM_MC_BEGIN(3, 0);
3430 IEM_MC_ARG(uint128_t *, pDst, 0);
3431 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3432 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3433 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3434 IEM_MC_PREPARE_SSE_USAGE();
3435 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3436 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3437 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3438 IEM_MC_ADVANCE_RIP();
3439 IEM_MC_END();
3440 }
3441 else
3442 {
3443 /*
3444 * Register, memory.
3445 */
3446 IEM_MC_BEGIN(3, 2);
3447 IEM_MC_ARG(uint128_t *, pDst, 0);
3448 IEM_MC_LOCAL(uint128_t, uSrc);
3449 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3451
3452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3453 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3454 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3456 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3457
3458 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3459 IEM_MC_PREPARE_SSE_USAGE();
3460 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3461 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3462
3463 IEM_MC_ADVANCE_RIP();
3464 IEM_MC_END();
3465 }
3466 return VINF_SUCCESS;
3467}
3468
3469
3470/** Opcode 0x0f 0x71 11/2. */
3471FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3472
3473/** Opcode 0x66 0x0f 0x71 11/2. */
3474FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
3475
3476/** Opcode 0x0f 0x71 11/4. */
3477FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3478
3479/** Opcode 0x66 0x0f 0x71 11/4. */
3480FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
3481
3482/** Opcode 0x0f 0x71 11/6. */
3483FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3484
3485/** Opcode 0x66 0x0f 0x71 11/6. */
3486FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
3487
3488
3489/** Opcode 0x0f 0x71. */
3490FNIEMOP_DEF(iemOp_Grp12)
3491{
3492 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3493 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3494 return IEMOP_RAISE_INVALID_OPCODE();
3495 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3496 {
3497 case 0: case 1: case 3: case 5: case 7:
3498 return IEMOP_RAISE_INVALID_OPCODE();
3499 case 2:
3500 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3501 {
3502 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
3503 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3504 default: return IEMOP_RAISE_INVALID_OPCODE();
3505 }
3506 case 4:
3507 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3508 {
3509 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3510 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3511 default: return IEMOP_RAISE_INVALID_OPCODE();
3512 }
3513 case 6:
3514 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3515 {
3516 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3517 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3518 default: return IEMOP_RAISE_INVALID_OPCODE();
3519 }
3520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3521 }
3522}
3523
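/*
 * Groups 12 thru 14 dispatch on two axes: the ModRM reg field selects the
 * shift operation and the operand-size/repeat prefixes select the register
 * file (no prefix = MMX, 0x66 = SSE). Worked decode, assuming the standard
 * encoding rules: 66 0F 71 D0 04 has mod=11b, reg=2, rm=0, so it lands in
 * iemOp_Grp12_psrlw_Udq_Ib and means psrlw xmm0, 4.
 */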
3524
3525/** Opcode 0x0f 0x72 11/2. */
3526FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3527
3528/** Opcode 0x66 0x0f 0x72 11/2. */
3529FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
3530
3531/** Opcode 0x0f 0x72 11/4. */
3532FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3533
3534/** Opcode 0x66 0x0f 0x72 11/4. */
3535FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
3536
3537/** Opcode 0x0f 0x72 11/6. */
3538FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3539
3540/** Opcode 0x66 0x0f 0x72 11/6. */
3541FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3542
3543
3544/** Opcode 0x0f 0x72. */
3545FNIEMOP_DEF(iemOp_Grp13)
3546{
3547 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3548 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3549 return IEMOP_RAISE_INVALID_OPCODE();
3550 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3551 {
3552 case 0: case 1: case 3: case 5: case 7:
3553 return IEMOP_RAISE_INVALID_OPCODE();
3554 case 2:
3555 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3556 {
3557 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3558 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3559 default: return IEMOP_RAISE_INVALID_OPCODE();
3560 }
3561 case 4:
3562 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3563 {
3564 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3565 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3566 default: return IEMOP_RAISE_INVALID_OPCODE();
3567 }
3568 case 6:
3569 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3570 {
3571 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3572 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3573 default: return IEMOP_RAISE_INVALID_OPCODE();
3574 }
3575 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3576 }
3577}
3578
3579
3580/** Opcode 0x0f 0x73 11/2. */
3581FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3582
3583/** Opcode 0x66 0x0f 0x73 11/2. */
3584FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
3585
3586/** Opcode 0x66 0x0f 0x73 11/3. */
3587FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3588
3589/** Opcode 0x0f 0x73 11/6. */
3590FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3591
3592/** Opcode 0x66 0x0f 0x73 11/6. */
3593FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3594
3595/** Opcode 0x66 0x0f 0x73 11/7. */
3596FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3597
3598
3599/** Opcode 0x0f 0x73. */
3600FNIEMOP_DEF(iemOp_Grp14)
3601{
3602 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3603 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3604 return IEMOP_RAISE_INVALID_OPCODE();
3605 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3606 {
3607 case 0: case 1: case 4: case 5:
3608 return IEMOP_RAISE_INVALID_OPCODE();
3609 case 2:
3610 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3611 {
3612 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3613 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3614 default: return IEMOP_RAISE_INVALID_OPCODE();
3615 }
3616 case 3:
3617 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3618 {
3619 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3620 default: return IEMOP_RAISE_INVALID_OPCODE();
3621 }
3622 case 6:
3623 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3624 {
3625 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3626 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3627 default: return IEMOP_RAISE_INVALID_OPCODE();
3628 }
3629 case 7:
3630 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3631 {
3632 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3633 default: return IEMOP_RAISE_INVALID_OPCODE();
3634 }
3635 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3636 }
3637}
3638
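/*
 * Unlike the word/dword/qword shifts, the byte shifts psrldq and pslldq
 * (reg=3 and reg=7 above) never had an MMX form, which is why those two
 * cases only accept the 0x66-prefixed encoding.
 */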
3639
3640/**
3641 * Common worker for MMX instructions of the form:
3642 * pxxx mm1, mm2/mem64
3643 */
3644FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3645{
3646 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3647 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3648 {
3649 /*
3650 * Register, register.
3651 */
3652 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3653 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3655 IEM_MC_BEGIN(2, 0);
3656 IEM_MC_ARG(uint64_t *, pDst, 0);
3657 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3658 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3659 IEM_MC_PREPARE_FPU_USAGE();
3660 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3661 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3662 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3663 IEM_MC_ADVANCE_RIP();
3664 IEM_MC_END();
3665 }
3666 else
3667 {
3668 /*
3669 * Register, memory.
3670 */
3671 IEM_MC_BEGIN(2, 2);
3672 IEM_MC_ARG(uint64_t *, pDst, 0);
3673 IEM_MC_LOCAL(uint64_t, uSrc);
3674 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3676
3677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3679 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3680 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3681
3682 IEM_MC_PREPARE_FPU_USAGE();
3683 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3684 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3685
3686 IEM_MC_ADVANCE_RIP();
3687 IEM_MC_END();
3688 }
3689 return VINF_SUCCESS;
3690}
3691
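/*
 * Usage sketch for the worker above: an opcode handler merely forwards its
 * IEMOPMEDIAF2 table entry, e.g. (mirroring iemOp_pcmpeqb_Pq_Qq below):
 *
 *     return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
 *
 * The same table entry carries the pfnU128 worker that the SSE2 variant
 * below uses.
 */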
3692
3693/**
3694 * Common worker for SSE2 instructions of the form:
3695 * pxxx xmm1, xmm2/mem128
3696 *
3697 * Proper alignment of the 128-bit operand is enforced.
3698 * Exceptions type 4. SSE2 cpuid checks.
3699 */
3700FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3701{
3702 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3703 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3704 {
3705 /*
3706 * Register, register.
3707 */
3708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3709 IEM_MC_BEGIN(2, 0);
3710 IEM_MC_ARG(uint128_t *, pDst, 0);
3711 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3712 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3713 IEM_MC_PREPARE_SSE_USAGE();
3714 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3715 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3716 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3717 IEM_MC_ADVANCE_RIP();
3718 IEM_MC_END();
3719 }
3720 else
3721 {
3722 /*
3723 * Register, memory.
3724 */
3725 IEM_MC_BEGIN(2, 2);
3726 IEM_MC_ARG(uint128_t *, pDst, 0);
3727 IEM_MC_LOCAL(uint128_t, uSrc);
3728 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3730
3731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3733 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3734 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3735
3736 IEM_MC_PREPARE_SSE_USAGE();
3737 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3738 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3739
3740 IEM_MC_ADVANCE_RIP();
3741 IEM_MC_END();
3742 }
3743 return VINF_SUCCESS;
3744}
3745
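/*
 * "Exceptions type 4" above is the SDM bucket for legacy SSE instructions
 * with alignment-checked 16-byte memory operands: a misaligned access takes
 * #GP(0), which is what the _ALIGN_SSE fetch implements (contrast the plain
 * unaligned fetch used by movdqu-style instructions).
 */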
3746
3747/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
3748FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
3749{
3750 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3751 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3752}
3753
3754/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
3755FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3756{
3757 IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
3758 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3759}
3760
3761/* Opcode 0xf3 0x0f 0x74 - invalid */
3762/* Opcode 0xf2 0x0f 0x74 - invalid */
3763
3764
3765/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
3766FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
3767{
3768 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3769 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3770}
3771
3772/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
3773FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3774{
3775 IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
3776 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3777}
3778
3779/* Opcode 0xf3 0x0f 0x75 - invalid */
3780/* Opcode 0xf2 0x0f 0x75 - invalid */
3781
3782
3783/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
3784FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
3785{
3786 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3787 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3788}
3789
3790/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
3791FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3792{
3793 IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
3794 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3795}
3796
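/*
 * Semantics reminder for the pcmpeq* family above: each destination element
 * becomes all ones where the source elements compare equal and all zeroes
 * otherwise, producing a byte/word/dword mask rather than an EFLAGS result.
 */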
3797/* Opcode 0xf3 0x0f 0x76 - invalid */
3798/* Opcode 0xf2 0x0f 0x76 - invalid */
3799
3800
3801/** Opcode 0x0f 0x77 - emms / vzeroupper / vzeroall */
3802FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3803/* Opcode 0x66 0x0f 0x77 - invalid */
3804/* Opcode 0xf3 0x0f 0x77 - invalid */
3805/* Opcode 0xf2 0x0f 0x77 - invalid */
3806
3807/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3808FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3809/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3810FNIEMOP_STUB(iemOp_AmdGrp17);
3811/* Opcode 0xf3 0x0f 0x78 - invalid */
3812/* Opcode 0xf2 0x0f 0x78 - invalid */
3813
3814/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3815FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3816/* Opcode 0x66 0x0f 0x79 - invalid */
3817/* Opcode 0xf3 0x0f 0x79 - invalid */
3818/* Opcode 0xf2 0x0f 0x79 - invalid */
3819
3820/* Opcode 0x0f 0x7a - invalid */
3821/* Opcode 0x66 0x0f 0x7a - invalid */
3822/* Opcode 0xf3 0x0f 0x7a - invalid */
3823/* Opcode 0xf2 0x0f 0x7a - invalid */
3824
3825/* Opcode 0x0f 0x7b - invalid */
3826/* Opcode 0x66 0x0f 0x7b - invalid */
3827/* Opcode 0xf3 0x0f 0x7b - invalid */
3828/* Opcode 0xf2 0x0f 0x7b - invalid */
3829
3830/* Opcode 0x0f 0x7c - invalid */
3831/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3832FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3833/* Opcode 0xf3 0x0f 0x7c - invalid */
3834/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3835FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3836
3837/* Opcode 0x0f 0x7d - invalid */
3838/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3839FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3840/* Opcode 0xf3 0x0f 0x7d - invalid */
3841/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3842FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3843
3844
3845/** Opcode 0x0f 0x7e - movd/movq Ey, Pd */
3846FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
3847{
3848 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3849 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3850 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3851 else
3852 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3853 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3854 {
3855 /* greg, MMX */
3856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3857 IEM_MC_BEGIN(0, 1);
3858 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3859 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3860 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3861 {
3862 IEM_MC_LOCAL(uint64_t, u64Tmp);
3863 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3864 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3865 }
3866 else
3867 {
3868 IEM_MC_LOCAL(uint32_t, u32Tmp);
3869 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3870 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3871 }
3872 IEM_MC_ADVANCE_RIP();
3873 IEM_MC_END();
3874 }
3875 else
3876 {
3877 /* [mem], MMX */
3878 IEM_MC_BEGIN(0, 2);
3879 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3880 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3881 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3883 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3884 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3885 {
3886 IEM_MC_LOCAL(uint64_t, u64Tmp);
3887 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3888 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3889 }
3890 else
3891 {
3892 IEM_MC_LOCAL(uint32_t, u32Tmp);
3893 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3894 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3895 }
3896 IEM_MC_ADVANCE_RIP();
3897 IEM_MC_END();
3898 }
3899 return VINF_SUCCESS;
3900}
3901
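/*
 * The operand width is picked by REX.W at decode time rather than by a
 * separate opcode. Worked example under the standard encoding rules:
 * 48 0F 7E C0 is mod=11b, reg=mm0, rm=rax with REX.W set, i.e.
 * movq rax, mm0; the same bytes without the 48 prefix give movd eax, mm0.
 */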
3902/** Opcode 0x66 0x0f 0x7e - vmovd/vmovq Ey, Vy */
3903FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3904{
3905 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3906 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3907 IEMOP_MNEMONIC(vmovq_Eq_Wq, "vmovq Eq,Wq");
3908 else
3909 IEMOP_MNEMONIC(vmovd_Ed_Wd, "vmovd Ed,Wd");
3910 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3911 {
3912 /* greg, XMM */
3913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3914 IEM_MC_BEGIN(0, 1);
3915 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3916 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3917 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3918 {
3919 IEM_MC_LOCAL(uint64_t, u64Tmp);
3920 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3921 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3922 }
3923 else
3924 {
3925 IEM_MC_LOCAL(uint32_t, u32Tmp);
3926 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3927 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3928 }
3929 IEM_MC_ADVANCE_RIP();
3930 IEM_MC_END();
3931 }
3932 else
3933 {
3934 /* [mem], XMM */
3935 IEM_MC_BEGIN(0, 2);
3936 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3937 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3940 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3941 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3942 {
3943 IEM_MC_LOCAL(uint64_t, u64Tmp);
3944 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3945 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3946 }
3947 else
3948 {
3949 IEM_MC_LOCAL(uint32_t, u32Tmp);
3950 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3951 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3952 }
3953 IEM_MC_ADVANCE_RIP();
3954 IEM_MC_END();
3955 }
3956 return VINF_SUCCESS;
3957}
3958
3959/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
3960FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
3961/* Opcode 0xf2 0x0f 0x7e - invalid */
3962
3963
3964/** Opcode 0x0f 0x7f - movq Qq, Pq */
3965FNIEMOP_DEF(iemOp_movq_Qq_Pq)
3966{
3967 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3968 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3969 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3970 {
3971 /*
3972 * Register, register.
3973 */
3974 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3975 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3977 IEM_MC_BEGIN(0, 1);
3978 IEM_MC_LOCAL(uint64_t, u64Tmp);
3979 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3980 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3981 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3982 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3983 IEM_MC_ADVANCE_RIP();
3984 IEM_MC_END();
3985 }
3986 else
3987 {
3988 /*
3989 * Register, memory.
3990 */
3991 IEM_MC_BEGIN(0, 2);
3992 IEM_MC_LOCAL(uint64_t, u64Tmp);
3993 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3994
3995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3997 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3998 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3999
4000 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4001 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4002
4003 IEM_MC_ADVANCE_RIP();
4004 IEM_MC_END();
4005 }
4006 return VINF_SUCCESS;
4007}
4008
4009/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
4010FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
4011{
4012 IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
4013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4014 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4015 {
4016 /*
4017 * Register, register.
4018 */
4019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4020 IEM_MC_BEGIN(0, 0);
4021 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4022 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4023 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4024 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4025 IEM_MC_ADVANCE_RIP();
4026 IEM_MC_END();
4027 }
4028 else
4029 {
4030 /*
4031 * Register, memory.
4032 */
4033 IEM_MC_BEGIN(0, 2);
4034 IEM_MC_LOCAL(uint128_t, u128Tmp);
4035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4036
4037 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4039 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4040 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4041
4042 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4043 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4044
4045 IEM_MC_ADVANCE_RIP();
4046 IEM_MC_END();
4047 }
4048 return VINF_SUCCESS;
4049}
4050
4051/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
4052FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4053{
4054 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4055 IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
4056 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4057 {
4058 /*
4059 * Register, register.
4060 */
4061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4062 IEM_MC_BEGIN(0, 0);
4063 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4064 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4065 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4066 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4067 IEM_MC_ADVANCE_RIP();
4068 IEM_MC_END();
4069 }
4070 else
4071 {
4072 /*
4073 * Register, memory.
4074 */
4075 IEM_MC_BEGIN(0, 2);
4076 IEM_MC_LOCAL(uint128_t, u128Tmp);
4077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4078
4079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4081 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4082 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4083
4084 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4085 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4086
4087 IEM_MC_ADVANCE_RIP();
4088 IEM_MC_END();
4089 }
4090 return VINF_SUCCESS;
4091}
4092
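/*
 * The only functional difference from the movdqa form above is the store:
 * IEM_MC_STORE_MEM_U128 tolerates any alignment, whereas the _ALIGN_SSE
 * variant raises #GP(0) on a misaligned 16-byte access. The register,
 * register paths are identical.
 */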
4093/* Opcode 0xf2 0x0f 0x7f - invalid */
4094
4095
4096
4097/** Opcode 0x0f 0x80. */
4098FNIEMOP_DEF(iemOp_jo_Jv)
4099{
4100 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4101 IEMOP_HLP_MIN_386();
4102 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4103 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4104 {
4105 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4107
4108 IEM_MC_BEGIN(0, 0);
4109 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4110 IEM_MC_REL_JMP_S16(i16Imm);
4111 } IEM_MC_ELSE() {
4112 IEM_MC_ADVANCE_RIP();
4113 } IEM_MC_ENDIF();
4114 IEM_MC_END();
4115 }
4116 else
4117 {
4118 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4120
4121 IEM_MC_BEGIN(0, 0);
4122 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4123 IEM_MC_REL_JMP_S32(i32Imm);
4124 } IEM_MC_ELSE() {
4125 IEM_MC_ADVANCE_RIP();
4126 } IEM_MC_ENDIF();
4127 IEM_MC_END();
4128 }
4129 return VINF_SUCCESS;
4130}
4131
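/*
 * All the 0x0f 0x8x handlers follow the shape above: Jv is a signed 16- or
 * 32-bit displacement picked by the effective operand size, and
 * IEMOP_HLP_DEFAULT_64BIT_OP_SIZE selects the 64-bit operand size in long
 * mode, so that mode always takes the rel32 path with the displacement
 * sign-extended over RIP. E.g. 0F 84 05 00 00 00 is je .+5, measured from
 * the end of the instruction.
 */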
4132
4133/** Opcode 0x0f 0x81. */
4134FNIEMOP_DEF(iemOp_jno_Jv)
4135{
4136 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4137 IEMOP_HLP_MIN_386();
4138 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4139 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4140 {
4141 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4143
4144 IEM_MC_BEGIN(0, 0);
4145 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4146 IEM_MC_ADVANCE_RIP();
4147 } IEM_MC_ELSE() {
4148 IEM_MC_REL_JMP_S16(i16Imm);
4149 } IEM_MC_ENDIF();
4150 IEM_MC_END();
4151 }
4152 else
4153 {
4154 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4156
4157 IEM_MC_BEGIN(0, 0);
4158 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4159 IEM_MC_ADVANCE_RIP();
4160 } IEM_MC_ELSE() {
4161 IEM_MC_REL_JMP_S32(i32Imm);
4162 } IEM_MC_ENDIF();
4163 IEM_MC_END();
4164 }
4165 return VINF_SUCCESS;
4166}
4167
4168
4169/** Opcode 0x0f 0x82. */
4170FNIEMOP_DEF(iemOp_jc_Jv)
4171{
4172 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4173 IEMOP_HLP_MIN_386();
4174 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4175 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4176 {
4177 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4179
4180 IEM_MC_BEGIN(0, 0);
4181 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4182 IEM_MC_REL_JMP_S16(i16Imm);
4183 } IEM_MC_ELSE() {
4184 IEM_MC_ADVANCE_RIP();
4185 } IEM_MC_ENDIF();
4186 IEM_MC_END();
4187 }
4188 else
4189 {
4190 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4192
4193 IEM_MC_BEGIN(0, 0);
4194 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4195 IEM_MC_REL_JMP_S32(i32Imm);
4196 } IEM_MC_ELSE() {
4197 IEM_MC_ADVANCE_RIP();
4198 } IEM_MC_ENDIF();
4199 IEM_MC_END();
4200 }
4201 return VINF_SUCCESS;
4202}
4203
4204
4205/** Opcode 0x0f 0x83. */
4206FNIEMOP_DEF(iemOp_jnc_Jv)
4207{
4208 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4209 IEMOP_HLP_MIN_386();
4210 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4211 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4212 {
4213 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4215
4216 IEM_MC_BEGIN(0, 0);
4217 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4218 IEM_MC_ADVANCE_RIP();
4219 } IEM_MC_ELSE() {
4220 IEM_MC_REL_JMP_S16(i16Imm);
4221 } IEM_MC_ENDIF();
4222 IEM_MC_END();
4223 }
4224 else
4225 {
4226 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4228
4229 IEM_MC_BEGIN(0, 0);
4230 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4231 IEM_MC_ADVANCE_RIP();
4232 } IEM_MC_ELSE() {
4233 IEM_MC_REL_JMP_S32(i32Imm);
4234 } IEM_MC_ENDIF();
4235 IEM_MC_END();
4236 }
4237 return VINF_SUCCESS;
4238}
4239
4240
4241/** Opcode 0x0f 0x84. */
4242FNIEMOP_DEF(iemOp_je_Jv)
4243{
4244 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4245 IEMOP_HLP_MIN_386();
4246 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4247 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4248 {
4249 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4251
4252 IEM_MC_BEGIN(0, 0);
4253 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4254 IEM_MC_REL_JMP_S16(i16Imm);
4255 } IEM_MC_ELSE() {
4256 IEM_MC_ADVANCE_RIP();
4257 } IEM_MC_ENDIF();
4258 IEM_MC_END();
4259 }
4260 else
4261 {
4262 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4264
4265 IEM_MC_BEGIN(0, 0);
4266 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4267 IEM_MC_REL_JMP_S32(i32Imm);
4268 } IEM_MC_ELSE() {
4269 IEM_MC_ADVANCE_RIP();
4270 } IEM_MC_ENDIF();
4271 IEM_MC_END();
4272 }
4273 return VINF_SUCCESS;
4274}
4275
4276
4277/** Opcode 0x0f 0x85. */
4278FNIEMOP_DEF(iemOp_jne_Jv)
4279{
4280 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4281 IEMOP_HLP_MIN_386();
4282 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4283 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4284 {
4285 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4287
4288 IEM_MC_BEGIN(0, 0);
4289 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4290 IEM_MC_ADVANCE_RIP();
4291 } IEM_MC_ELSE() {
4292 IEM_MC_REL_JMP_S16(i16Imm);
4293 } IEM_MC_ENDIF();
4294 IEM_MC_END();
4295 }
4296 else
4297 {
4298 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4300
4301 IEM_MC_BEGIN(0, 0);
4302 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4303 IEM_MC_ADVANCE_RIP();
4304 } IEM_MC_ELSE() {
4305 IEM_MC_REL_JMP_S32(i32Imm);
4306 } IEM_MC_ENDIF();
4307 IEM_MC_END();
4308 }
4309 return VINF_SUCCESS;
4310}
4311
4312
4313/** Opcode 0x0f 0x86. */
4314FNIEMOP_DEF(iemOp_jbe_Jv)
4315{
4316 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4317 IEMOP_HLP_MIN_386();
4318 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4319 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4320 {
4321 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4323
4324 IEM_MC_BEGIN(0, 0);
4325 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4326 IEM_MC_REL_JMP_S16(i16Imm);
4327 } IEM_MC_ELSE() {
4328 IEM_MC_ADVANCE_RIP();
4329 } IEM_MC_ENDIF();
4330 IEM_MC_END();
4331 }
4332 else
4333 {
4334 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4336
4337 IEM_MC_BEGIN(0, 0);
4338 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4339 IEM_MC_REL_JMP_S32(i32Imm);
4340 } IEM_MC_ELSE() {
4341 IEM_MC_ADVANCE_RIP();
4342 } IEM_MC_ENDIF();
4343 IEM_MC_END();
4344 }
4345 return VINF_SUCCESS;
4346}
4347
4348
4349/** Opcode 0x0f 0x87. */
4350FNIEMOP_DEF(iemOp_jnbe_Jv)
4351{
4352 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4353 IEMOP_HLP_MIN_386();
4354 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4355 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4356 {
4357 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4359
4360 IEM_MC_BEGIN(0, 0);
4361 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4362 IEM_MC_ADVANCE_RIP();
4363 } IEM_MC_ELSE() {
4364 IEM_MC_REL_JMP_S16(i16Imm);
4365 } IEM_MC_ENDIF();
4366 IEM_MC_END();
4367 }
4368 else
4369 {
4370 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4372
4373 IEM_MC_BEGIN(0, 0);
4374 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4375 IEM_MC_ADVANCE_RIP();
4376 } IEM_MC_ELSE() {
4377 IEM_MC_REL_JMP_S32(i32Imm);
4378 } IEM_MC_ENDIF();
4379 IEM_MC_END();
4380 }
4381 return VINF_SUCCESS;
4382}
4383
4384
4385/** Opcode 0x0f 0x88. */
4386FNIEMOP_DEF(iemOp_js_Jv)
4387{
4388 IEMOP_MNEMONIC(js_Jv, "js Jv");
4389 IEMOP_HLP_MIN_386();
4390 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4391 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4392 {
4393 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4395
4396 IEM_MC_BEGIN(0, 0);
4397 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4398 IEM_MC_REL_JMP_S16(i16Imm);
4399 } IEM_MC_ELSE() {
4400 IEM_MC_ADVANCE_RIP();
4401 } IEM_MC_ENDIF();
4402 IEM_MC_END();
4403 }
4404 else
4405 {
4406 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4408
4409 IEM_MC_BEGIN(0, 0);
4410 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4411 IEM_MC_REL_JMP_S32(i32Imm);
4412 } IEM_MC_ELSE() {
4413 IEM_MC_ADVANCE_RIP();
4414 } IEM_MC_ENDIF();
4415 IEM_MC_END();
4416 }
4417 return VINF_SUCCESS;
4418}
4419
4420
4421/** Opcode 0x0f 0x89. */
4422FNIEMOP_DEF(iemOp_jns_Jv)
4423{
4424 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4425 IEMOP_HLP_MIN_386();
4426 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4427 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4428 {
4429 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4431
4432 IEM_MC_BEGIN(0, 0);
4433 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4434 IEM_MC_ADVANCE_RIP();
4435 } IEM_MC_ELSE() {
4436 IEM_MC_REL_JMP_S16(i16Imm);
4437 } IEM_MC_ENDIF();
4438 IEM_MC_END();
4439 }
4440 else
4441 {
4442 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4444
4445 IEM_MC_BEGIN(0, 0);
4446 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4447 IEM_MC_ADVANCE_RIP();
4448 } IEM_MC_ELSE() {
4449 IEM_MC_REL_JMP_S32(i32Imm);
4450 } IEM_MC_ENDIF();
4451 IEM_MC_END();
4452 }
4453 return VINF_SUCCESS;
4454}
4455
4456
4457/** Opcode 0x0f 0x8a. */
4458FNIEMOP_DEF(iemOp_jp_Jv)
4459{
4460 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4461 IEMOP_HLP_MIN_386();
4462 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4463 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4464 {
4465 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4467
4468 IEM_MC_BEGIN(0, 0);
4469 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4470 IEM_MC_REL_JMP_S16(i16Imm);
4471 } IEM_MC_ELSE() {
4472 IEM_MC_ADVANCE_RIP();
4473 } IEM_MC_ENDIF();
4474 IEM_MC_END();
4475 }
4476 else
4477 {
4478 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4480
4481 IEM_MC_BEGIN(0, 0);
4482 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4483 IEM_MC_REL_JMP_S32(i32Imm);
4484 } IEM_MC_ELSE() {
4485 IEM_MC_ADVANCE_RIP();
4486 } IEM_MC_ENDIF();
4487 IEM_MC_END();
4488 }
4489 return VINF_SUCCESS;
4490}
4491
4492
4493/** Opcode 0x0f 0x8b. */
4494FNIEMOP_DEF(iemOp_jnp_Jv)
4495{
4496 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4497 IEMOP_HLP_MIN_386();
4498 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4499 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4500 {
4501 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4503
4504 IEM_MC_BEGIN(0, 0);
4505 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4506 IEM_MC_ADVANCE_RIP();
4507 } IEM_MC_ELSE() {
4508 IEM_MC_REL_JMP_S16(i16Imm);
4509 } IEM_MC_ENDIF();
4510 IEM_MC_END();
4511 }
4512 else
4513 {
4514 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4516
4517 IEM_MC_BEGIN(0, 0);
4518 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4519 IEM_MC_ADVANCE_RIP();
4520 } IEM_MC_ELSE() {
4521 IEM_MC_REL_JMP_S32(i32Imm);
4522 } IEM_MC_ENDIF();
4523 IEM_MC_END();
4524 }
4525 return VINF_SUCCESS;
4526}
4527
4528
4529/** Opcode 0x0f 0x8c. */
4530FNIEMOP_DEF(iemOp_jl_Jv)
4531{
4532 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
4533 IEMOP_HLP_MIN_386();
4534 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4535 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4536 {
4537 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4539
4540 IEM_MC_BEGIN(0, 0);
4541 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4542 IEM_MC_REL_JMP_S16(i16Imm);
4543 } IEM_MC_ELSE() {
4544 IEM_MC_ADVANCE_RIP();
4545 } IEM_MC_ENDIF();
4546 IEM_MC_END();
4547 }
4548 else
4549 {
4550 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4552
4553 IEM_MC_BEGIN(0, 0);
4554 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4555 IEM_MC_REL_JMP_S32(i32Imm);
4556 } IEM_MC_ELSE() {
4557 IEM_MC_ADVANCE_RIP();
4558 } IEM_MC_ENDIF();
4559 IEM_MC_END();
4560 }
4561 return VINF_SUCCESS;
4562}
4563
4564
4565/** Opcode 0x0f 0x8d. */
4566FNIEMOP_DEF(iemOp_jnl_Jv)
4567{
4568 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4569 IEMOP_HLP_MIN_386();
4570 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4571 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4572 {
4573 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4575
4576 IEM_MC_BEGIN(0, 0);
4577 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4578 IEM_MC_ADVANCE_RIP();
4579 } IEM_MC_ELSE() {
4580 IEM_MC_REL_JMP_S16(i16Imm);
4581 } IEM_MC_ENDIF();
4582 IEM_MC_END();
4583 }
4584 else
4585 {
4586 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4588
4589 IEM_MC_BEGIN(0, 0);
4590 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4591 IEM_MC_ADVANCE_RIP();
4592 } IEM_MC_ELSE() {
4593 IEM_MC_REL_JMP_S32(i32Imm);
4594 } IEM_MC_ENDIF();
4595 IEM_MC_END();
4596 }
4597 return VINF_SUCCESS;
4598}
4599
4600
4601/** Opcode 0x0f 0x8e. */
4602FNIEMOP_DEF(iemOp_jle_Jv)
4603{
4604 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4605 IEMOP_HLP_MIN_386();
4606 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4607 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4608 {
4609 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4611
4612 IEM_MC_BEGIN(0, 0);
4613 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4614 IEM_MC_REL_JMP_S16(i16Imm);
4615 } IEM_MC_ELSE() {
4616 IEM_MC_ADVANCE_RIP();
4617 } IEM_MC_ENDIF();
4618 IEM_MC_END();
4619 }
4620 else
4621 {
4622 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4624
4625 IEM_MC_BEGIN(0, 0);
4626 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4627 IEM_MC_REL_JMP_S32(i32Imm);
4628 } IEM_MC_ELSE() {
4629 IEM_MC_ADVANCE_RIP();
4630 } IEM_MC_ENDIF();
4631 IEM_MC_END();
4632 }
4633 return VINF_SUCCESS;
4634}
4635
4636
4637/** Opcode 0x0f 0x8f. */
4638FNIEMOP_DEF(iemOp_jnle_Jv)
4639{
4640 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4641 IEMOP_HLP_MIN_386();
4642 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4643 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4644 {
4645 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4647
4648 IEM_MC_BEGIN(0, 0);
4649 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4650 IEM_MC_ADVANCE_RIP();
4651 } IEM_MC_ELSE() {
4652 IEM_MC_REL_JMP_S16(i16Imm);
4653 } IEM_MC_ENDIF();
4654 IEM_MC_END();
4655 }
4656 else
4657 {
4658 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4660
4661 IEM_MC_BEGIN(0, 0);
4662 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4663 IEM_MC_ADVANCE_RIP();
4664 } IEM_MC_ELSE() {
4665 IEM_MC_REL_JMP_S32(i32Imm);
4666 } IEM_MC_ENDIF();
4667 IEM_MC_END();
4668 }
4669 return VINF_SUCCESS;
4670}
4671
4672
4673/** Opcode 0x0f 0x90. */
4674FNIEMOP_DEF(iemOp_seto_Eb)
4675{
4676 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4677 IEMOP_HLP_MIN_386();
4678 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4679
4680 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4681 * any way. AMD says it's "unused", whatever that means. We're
4682 * ignoring it for now. */
4683 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4684 {
4685 /* register target */
4686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4687 IEM_MC_BEGIN(0, 0);
4688 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4689 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4690 } IEM_MC_ELSE() {
4691 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4692 } IEM_MC_ENDIF();
4693 IEM_MC_ADVANCE_RIP();
4694 IEM_MC_END();
4695 }
4696 else
4697 {
4698 /* memory target */
4699 IEM_MC_BEGIN(0, 1);
4700 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4701 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4703 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4704 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4705 } IEM_MC_ELSE() {
4706 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4707 } IEM_MC_ENDIF();
4708 IEM_MC_ADVANCE_RIP();
4709 IEM_MC_END();
4710 }
4711 return VINF_SUCCESS;
4712}
4713
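/*
 * The 0x0f 0x9x handlers all reduce to "store 1 or 0 into an 8-bit
 * destination depending on one EFLAGS predicate"; only the predicate macro
 * differs below. As implemented here the reg field really is ignored, so
 * e.g. 0F 90 C0 and 0F 90 F8 (reg=0 vs reg=7) both decode to seto al;
 * whether real CPUs care is the open testcase question above.
 */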
4714
4715/** Opcode 0x0f 0x91. */
4716FNIEMOP_DEF(iemOp_setno_Eb)
4717{
4718 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4719 IEMOP_HLP_MIN_386();
4720 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4721
4722 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4723 * any way. AMD says it's "unused", whatever that means. We're
4724 * ignoring it for now. */
4725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4726 {
4727 /* register target */
4728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4729 IEM_MC_BEGIN(0, 0);
4730 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4731 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4732 } IEM_MC_ELSE() {
4733 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4734 } IEM_MC_ENDIF();
4735 IEM_MC_ADVANCE_RIP();
4736 IEM_MC_END();
4737 }
4738 else
4739 {
4740 /* memory target */
4741 IEM_MC_BEGIN(0, 1);
4742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4745 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4746 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4747 } IEM_MC_ELSE() {
4748 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4749 } IEM_MC_ENDIF();
4750 IEM_MC_ADVANCE_RIP();
4751 IEM_MC_END();
4752 }
4753 return VINF_SUCCESS;
4754}
4755
4756
4757/** Opcode 0x0f 0x92. */
4758FNIEMOP_DEF(iemOp_setc_Eb)
4759{
4760 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4761 IEMOP_HLP_MIN_386();
4762 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4763
4764 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4765 * any way. AMD says it's "unused", whatever that means. We're
4766 * ignoring it for now. */
4767 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4768 {
4769 /* register target */
4770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4771 IEM_MC_BEGIN(0, 0);
4772 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4773 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4774 } IEM_MC_ELSE() {
4775 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4776 } IEM_MC_ENDIF();
4777 IEM_MC_ADVANCE_RIP();
4778 IEM_MC_END();
4779 }
4780 else
4781 {
4782 /* memory target */
4783 IEM_MC_BEGIN(0, 1);
4784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4787 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4788 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4789 } IEM_MC_ELSE() {
4790 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4791 } IEM_MC_ENDIF();
4792 IEM_MC_ADVANCE_RIP();
4793 IEM_MC_END();
4794 }
4795 return VINF_SUCCESS;
4796}
4797
4798
4799/** Opcode 0x0f 0x93. */
4800FNIEMOP_DEF(iemOp_setnc_Eb)
4801{
4802 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4803 IEMOP_HLP_MIN_386();
4804 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4805
4806 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4807 * any way. AMD says it's "unused", whatever that means. We're
4808 * ignoring it for now. */
4809 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4810 {
4811 /* register target */
4812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4813 IEM_MC_BEGIN(0, 0);
4814 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4815 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4816 } IEM_MC_ELSE() {
4817 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4818 } IEM_MC_ENDIF();
4819 IEM_MC_ADVANCE_RIP();
4820 IEM_MC_END();
4821 }
4822 else
4823 {
4824 /* memory target */
4825 IEM_MC_BEGIN(0, 1);
4826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4829 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4830 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4831 } IEM_MC_ELSE() {
4832 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4833 } IEM_MC_ENDIF();
4834 IEM_MC_ADVANCE_RIP();
4835 IEM_MC_END();
4836 }
4837 return VINF_SUCCESS;
4838}
4839
4840
4841/** Opcode 0x0f 0x94. */
4842FNIEMOP_DEF(iemOp_sete_Eb)
4843{
4844 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4845 IEMOP_HLP_MIN_386();
4846 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4847
4848 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4849 * any way. AMD says it's "unused", whatever that means. We're
4850 * ignoring it for now. */
4851 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4852 {
4853 /* register target */
4854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4855 IEM_MC_BEGIN(0, 0);
4856 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4857 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4858 } IEM_MC_ELSE() {
4859 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4860 } IEM_MC_ENDIF();
4861 IEM_MC_ADVANCE_RIP();
4862 IEM_MC_END();
4863 }
4864 else
4865 {
4866 /* memory target */
4867 IEM_MC_BEGIN(0, 1);
4868 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4871 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4872 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4873 } IEM_MC_ELSE() {
4874 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4875 } IEM_MC_ENDIF();
4876 IEM_MC_ADVANCE_RIP();
4877 IEM_MC_END();
4878 }
4879 return VINF_SUCCESS;
4880}
4881
4882
4883/** Opcode 0x0f 0x95. */
4884FNIEMOP_DEF(iemOp_setne_Eb)
4885{
4886 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4887 IEMOP_HLP_MIN_386();
4888 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4889
4890 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4891 * any way. AMD says it's "unused", whatever that means. We're
4892 * ignoring it for now. */
4893 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4894 {
4895 /* register target */
4896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4897 IEM_MC_BEGIN(0, 0);
4898 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4899 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4900 } IEM_MC_ELSE() {
4901 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4902 } IEM_MC_ENDIF();
4903 IEM_MC_ADVANCE_RIP();
4904 IEM_MC_END();
4905 }
4906 else
4907 {
4908 /* memory target */
4909 IEM_MC_BEGIN(0, 1);
4910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4913 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4914 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4915 } IEM_MC_ELSE() {
4916 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4917 } IEM_MC_ENDIF();
4918 IEM_MC_ADVANCE_RIP();
4919 IEM_MC_END();
4920 }
4921 return VINF_SUCCESS;
4922}
4923
4924
4925/** Opcode 0x0f 0x96. */
4926FNIEMOP_DEF(iemOp_setbe_Eb)
4927{
4928 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4929 IEMOP_HLP_MIN_386();
4930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4931
4932 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4933 * any way. AMD says it's "unused", whatever that means. We're
4934 * ignoring it for now. */
4935 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4936 {
4937 /* register target */
4938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4939 IEM_MC_BEGIN(0, 0);
4940 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4941 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4942 } IEM_MC_ELSE() {
4943 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4944 } IEM_MC_ENDIF();
4945 IEM_MC_ADVANCE_RIP();
4946 IEM_MC_END();
4947 }
4948 else
4949 {
4950 /* memory target */
4951 IEM_MC_BEGIN(0, 1);
4952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4955 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4956 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4957 } IEM_MC_ELSE() {
4958 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4959 } IEM_MC_ENDIF();
4960 IEM_MC_ADVANCE_RIP();
4961 IEM_MC_END();
4962 }
4963 return VINF_SUCCESS;
4964}
4965
4966
4967/** Opcode 0x0f 0x97. */
4968FNIEMOP_DEF(iemOp_setnbe_Eb)
4969{
4970 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4971 IEMOP_HLP_MIN_386();
4972 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4973
4974 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4975 * any way. AMD says it's "unused", whatever that means. We're
4976 * ignoring it for now. */
4977 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4978 {
4979 /* register target */
4980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4981 IEM_MC_BEGIN(0, 0);
4982 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4983 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4984 } IEM_MC_ELSE() {
4985 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4986 } IEM_MC_ENDIF();
4987 IEM_MC_ADVANCE_RIP();
4988 IEM_MC_END();
4989 }
4990 else
4991 {
4992 /* memory target */
4993 IEM_MC_BEGIN(0, 1);
4994 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4997 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4998 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4999 } IEM_MC_ELSE() {
5000 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5001 } IEM_MC_ENDIF();
5002 IEM_MC_ADVANCE_RIP();
5003 IEM_MC_END();
5004 }
5005 return VINF_SUCCESS;
5006}
5007
5008
5009/** Opcode 0x0f 0x98. */
5010FNIEMOP_DEF(iemOp_sets_Eb)
5011{
5012 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5013 IEMOP_HLP_MIN_386();
5014 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5015
5016 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5017 * any way. AMD says it's "unused", whatever that means. We're
5018 * ignoring it for now. */
5019 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5020 {
5021 /* register target */
5022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5023 IEM_MC_BEGIN(0, 0);
5024 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5025 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5026 } IEM_MC_ELSE() {
5027 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5028 } IEM_MC_ENDIF();
5029 IEM_MC_ADVANCE_RIP();
5030 IEM_MC_END();
5031 }
5032 else
5033 {
5034 /* memory target */
5035 IEM_MC_BEGIN(0, 1);
5036 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5037 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5039 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5040 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5041 } IEM_MC_ELSE() {
5042 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5043 } IEM_MC_ENDIF();
5044 IEM_MC_ADVANCE_RIP();
5045 IEM_MC_END();
5046 }
5047 return VINF_SUCCESS;
5048}
5049
5050
5051/** Opcode 0x0f 0x99. */
5052FNIEMOP_DEF(iemOp_setns_Eb)
5053{
5054 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5055 IEMOP_HLP_MIN_386();
5056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5057
5058 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5059 * any way. AMD says it's "unused", whatever that means. We're
5060 * ignoring it for now. */
5061 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5062 {
5063 /* register target */
5064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5065 IEM_MC_BEGIN(0, 0);
5066 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5067 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5068 } IEM_MC_ELSE() {
5069 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5070 } IEM_MC_ENDIF();
5071 IEM_MC_ADVANCE_RIP();
5072 IEM_MC_END();
5073 }
5074 else
5075 {
5076 /* memory target */
5077 IEM_MC_BEGIN(0, 1);
5078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5081 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5082 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5083 } IEM_MC_ELSE() {
5084 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5085 } IEM_MC_ENDIF();
5086 IEM_MC_ADVANCE_RIP();
5087 IEM_MC_END();
5088 }
5089 return VINF_SUCCESS;
5090}
5091
5092
5093/** Opcode 0x0f 0x9a. */
5094FNIEMOP_DEF(iemOp_setp_Eb)
5095{
5096 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5097 IEMOP_HLP_MIN_386();
5098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5099
5100 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5101 * any way. AMD says it's "unused", whatever that means. We're
5102 * ignoring it for now. */
5103 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5104 {
5105 /* register target */
5106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5107 IEM_MC_BEGIN(0, 0);
5108 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5109 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5110 } IEM_MC_ELSE() {
5111 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5112 } IEM_MC_ENDIF();
5113 IEM_MC_ADVANCE_RIP();
5114 IEM_MC_END();
5115 }
5116 else
5117 {
5118 /* memory target */
5119 IEM_MC_BEGIN(0, 1);
5120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5123 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5124 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5125 } IEM_MC_ELSE() {
5126 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5127 } IEM_MC_ENDIF();
5128 IEM_MC_ADVANCE_RIP();
5129 IEM_MC_END();
5130 }
5131 return VINF_SUCCESS;
5132}
5133
5134
5135/** Opcode 0x0f 0x9b. */
5136FNIEMOP_DEF(iemOp_setnp_Eb)
5137{
5138 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5139 IEMOP_HLP_MIN_386();
5140 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5141
5142 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5143 * any way. AMD says it's "unused", whatever that means. We're
5144 * ignoring it for now. */
5145 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5146 {
5147 /* register target */
5148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5149 IEM_MC_BEGIN(0, 0);
5150 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5151 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5152 } IEM_MC_ELSE() {
5153 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5154 } IEM_MC_ENDIF();
5155 IEM_MC_ADVANCE_RIP();
5156 IEM_MC_END();
5157 }
5158 else
5159 {
5160 /* memory target */
5161 IEM_MC_BEGIN(0, 1);
5162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5165 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5166 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5167 } IEM_MC_ELSE() {
5168 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5169 } IEM_MC_ENDIF();
5170 IEM_MC_ADVANCE_RIP();
5171 IEM_MC_END();
5172 }
5173 return VINF_SUCCESS;
5174}
5175
5176
5177/** Opcode 0x0f 0x9c. */
5178FNIEMOP_DEF(iemOp_setl_Eb)
5179{
5180 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5181 IEMOP_HLP_MIN_386();
5182 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5183
5184 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5185 * any way. AMD says it's "unused", whatever that means. We're
5186 * ignoring it for now. */
5187 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5188 {
5189 /* register target */
5190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5191 IEM_MC_BEGIN(0, 0);
5192 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5193 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5194 } IEM_MC_ELSE() {
5195 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5196 } IEM_MC_ENDIF();
5197 IEM_MC_ADVANCE_RIP();
5198 IEM_MC_END();
5199 }
5200 else
5201 {
5202 /* memory target */
5203 IEM_MC_BEGIN(0, 1);
5204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5207 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5208 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5209 } IEM_MC_ELSE() {
5210 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5211 } IEM_MC_ENDIF();
5212 IEM_MC_ADVANCE_RIP();
5213 IEM_MC_END();
5214 }
5215 return VINF_SUCCESS;
5216}
5217
5218
5219/** Opcode 0x0f 0x9d. */
5220FNIEMOP_DEF(iemOp_setnl_Eb)
5221{
5222 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5223 IEMOP_HLP_MIN_386();
5224 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5225
5226 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5227 * any way. AMD says it's "unused", whatever that means. We're
5228 * ignoring it for now. */
5229 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5230 {
5231 /* register target */
5232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5233 IEM_MC_BEGIN(0, 0);
5234 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5235 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5236 } IEM_MC_ELSE() {
5237 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5238 } IEM_MC_ENDIF();
5239 IEM_MC_ADVANCE_RIP();
5240 IEM_MC_END();
5241 }
5242 else
5243 {
5244 /* memory target */
5245 IEM_MC_BEGIN(0, 1);
5246 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5247 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5249 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5250 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5251 } IEM_MC_ELSE() {
5252 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5253 } IEM_MC_ENDIF();
5254 IEM_MC_ADVANCE_RIP();
5255 IEM_MC_END();
5256 }
5257 return VINF_SUCCESS;
5258}
5259
5260
5261/** Opcode 0x0f 0x9e. */
5262FNIEMOP_DEF(iemOp_setle_Eb)
5263{
5264 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5265 IEMOP_HLP_MIN_386();
5266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5267
5268 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5269 * any way. AMD says it's "unused", whatever that means. We're
5270 * ignoring it for now. */
5271 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5272 {
5273 /* register target */
5274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5275 IEM_MC_BEGIN(0, 0);
5276 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5277 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5278 } IEM_MC_ELSE() {
5279 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5280 } IEM_MC_ENDIF();
5281 IEM_MC_ADVANCE_RIP();
5282 IEM_MC_END();
5283 }
5284 else
5285 {
5286 /* memory target */
5287 IEM_MC_BEGIN(0, 1);
5288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5291 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5292 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5293 } IEM_MC_ELSE() {
5294 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5295 } IEM_MC_ENDIF();
5296 IEM_MC_ADVANCE_RIP();
5297 IEM_MC_END();
5298 }
5299 return VINF_SUCCESS;
5300}
5301
5302
5303/** Opcode 0x0f 0x9f. */
5304FNIEMOP_DEF(iemOp_setnle_Eb)
5305{
5306 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5307 IEMOP_HLP_MIN_386();
5308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5309
5310 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5311 * any way. AMD says it's "unused", whatever that means. We're
5312 * ignoring it for now. */
5313 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5314 {
5315 /* register target */
5316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5317 IEM_MC_BEGIN(0, 0);
5318 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5319 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5320 } IEM_MC_ELSE() {
5321 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5322 } IEM_MC_ENDIF();
5323 IEM_MC_ADVANCE_RIP();
5324 IEM_MC_END();
5325 }
5326 else
5327 {
5328 /* memory target */
5329 IEM_MC_BEGIN(0, 1);
5330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5331 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5333 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5334 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5335 } IEM_MC_ELSE() {
5336 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5337 } IEM_MC_ENDIF();
5338 IEM_MC_ADVANCE_RIP();
5339 IEM_MC_END();
5340 }
5341 return VINF_SUCCESS;
5342}
5343
5344
5345/**
5346 * Common 'push segment-register' helper.
5347 */
5348FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5349{
5350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
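 /* In 64-bit mode only FS and GS may be pushed; pushing ES, CS, SS or DS
    (iReg < X86_SREG_FS) raises #UD there, hence the check below. */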
5351 if (iReg < X86_SREG_FS)
5352 IEMOP_HLP_NO_64BIT();
5353 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5354
5355 switch (pVCpu->iem.s.enmEffOpSize)
5356 {
5357 case IEMMODE_16BIT:
5358 IEM_MC_BEGIN(0, 1);
5359 IEM_MC_LOCAL(uint16_t, u16Value);
5360 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5361 IEM_MC_PUSH_U16(u16Value);
5362 IEM_MC_ADVANCE_RIP();
5363 IEM_MC_END();
5364 break;
5365
5366 case IEMMODE_32BIT:
5367 IEM_MC_BEGIN(0, 1);
5368 IEM_MC_LOCAL(uint32_t, u32Value);
5369 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
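 /* A dedicated U32_SREG push is used here because recent CPUs, when
    pushing a segment register with a 32-bit operand size, only write the
    low 16 bits of the stack slot and leave the upper half untouched;
    the special helper presumably reproduces that rather than doing a
    full 32-bit store. */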
5370 IEM_MC_PUSH_U32_SREG(u32Value);
5371 IEM_MC_ADVANCE_RIP();
5372 IEM_MC_END();
5373 break;
5374
5375 case IEMMODE_64BIT:
5376 IEM_MC_BEGIN(0, 1);
5377 IEM_MC_LOCAL(uint64_t, u64Value);
5378 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5379 IEM_MC_PUSH_U64(u64Value);
5380 IEM_MC_ADVANCE_RIP();
5381 IEM_MC_END();
5382 break;
5383 }
5384
5385 return VINF_SUCCESS;
5386}
5387
5388
5389/** Opcode 0x0f 0xa0. */
5390FNIEMOP_DEF(iemOp_push_fs)
5391{
5392 IEMOP_MNEMONIC(push_fs, "push fs");
5393 IEMOP_HLP_MIN_386();
5394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5395 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5396}
5397
5398
5399/** Opcode 0x0f 0xa1. */
5400FNIEMOP_DEF(iemOp_pop_fs)
5401{
5402 IEMOP_MNEMONIC(pop_fs, "pop fs");
5403 IEMOP_HLP_MIN_386();
5404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5405 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5406}
5407
5408
5409/** Opcode 0x0f 0xa2. */
5410FNIEMOP_DEF(iemOp_cpuid)
5411{
5412 IEMOP_MNEMONIC(cpuid, "cpuid");
5413 IEMOP_HLP_MIN_486(); /* not all 486s implement CPUID. */
5414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5415 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5416}
5417
5418
5419/**
5420 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5421 * iemOp_bts_Ev_Gv.
5422 */
5423FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5424{
5425 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5426 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5427
5428 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5429 {
5430 /* register destination. */
5431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
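 /* For a register destination the bit offset simply wraps: it is masked
    down to the operand width (mod 16/32/64) before calling the worker,
    so no memory-style address adjustment is needed. */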
5432 switch (pVCpu->iem.s.enmEffOpSize)
5433 {
5434 case IEMMODE_16BIT:
5435 IEM_MC_BEGIN(3, 0);
5436 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5437 IEM_MC_ARG(uint16_t, u16Src, 1);
5438 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5439
5440 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5441 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5442 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5443 IEM_MC_REF_EFLAGS(pEFlags);
5444 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5445
5446 IEM_MC_ADVANCE_RIP();
5447 IEM_MC_END();
5448 return VINF_SUCCESS;
5449
5450 case IEMMODE_32BIT:
5451 IEM_MC_BEGIN(3, 0);
5452 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5453 IEM_MC_ARG(uint32_t, u32Src, 1);
5454 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5455
5456 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5457 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5458 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5459 IEM_MC_REF_EFLAGS(pEFlags);
5460 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5461
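 /* Writing through the 32-bit reference does not zero-extend, so the
    architectural zero-extension of 32-bit GPR writes has to be done
    explicitly: */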
5462 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5463 IEM_MC_ADVANCE_RIP();
5464 IEM_MC_END();
5465 return VINF_SUCCESS;
5466
5467 case IEMMODE_64BIT:
5468 IEM_MC_BEGIN(3, 0);
5469 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5470 IEM_MC_ARG(uint64_t, u64Src, 1);
5471 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5472
5473 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5474 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5475 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5476 IEM_MC_REF_EFLAGS(pEFlags);
5477 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5478
5479 IEM_MC_ADVANCE_RIP();
5480 IEM_MC_END();
5481 return VINF_SUCCESS;
5482
5483 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5484 }
5485 }
5486 else
5487 {
5488 /* memory destination. */
5489
5490 uint32_t fAccess;
5491 if (pImpl->pfnLockedU16)
5492 fAccess = IEM_ACCESS_DATA_RW;
5493 else /* BT */
5494 fAccess = IEM_ACCESS_DATA_R;
5495
5496 /** @todo test negative bit offsets! */
5497 switch (pVCpu->iem.s.enmEffOpSize)
5498 {
5499 case IEMMODE_16BIT:
5500 IEM_MC_BEGIN(3, 2);
5501 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5502 IEM_MC_ARG(uint16_t, u16Src, 1);
5503 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5504 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5505 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5506
5507 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5508 if (pImpl->pfnLockedU16)
5509 IEMOP_HLP_DONE_DECODING();
5510 else
5511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5512 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
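 /* The bit offset taken from the register is a signed quantity and may
    address bits outside the operand at the effective address: SAR by 4
    yields a signed word index, SHL by 1 scales it to a byte displacement
    (possibly negative) which is added to the effective address, and the
    low four bits remaining in u16Src select the bit within that word.
    The 32-bit and 64-bit cases below do the same at dword/qword
    granularity. */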
5513 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5514 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5515 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5516 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
5517 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5518 IEM_MC_FETCH_EFLAGS(EFlags);
5519
5520 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5521 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5522 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5523 else
5524 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5525 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5526
5527 IEM_MC_COMMIT_EFLAGS(EFlags);
5528 IEM_MC_ADVANCE_RIP();
5529 IEM_MC_END();
5530 return VINF_SUCCESS;
5531
5532 case IEMMODE_32BIT:
5533 IEM_MC_BEGIN(3, 2);
5534 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5535 IEM_MC_ARG(uint32_t, u32Src, 1);
5536 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5538 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5539
5540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5541 if (pImpl->pfnLockedU16)
5542 IEMOP_HLP_DONE_DECODING();
5543 else
5544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5545 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5546 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5547 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5548 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5549 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5550 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5551 IEM_MC_FETCH_EFLAGS(EFlags);
5552
5553 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5554 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5555 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5556 else
5557 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5558 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5559
5560 IEM_MC_COMMIT_EFLAGS(EFlags);
5561 IEM_MC_ADVANCE_RIP();
5562 IEM_MC_END();
5563 return VINF_SUCCESS;
5564
5565 case IEMMODE_64BIT:
5566 IEM_MC_BEGIN(3, 2);
5567 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5568 IEM_MC_ARG(uint64_t, u64Src, 1);
5569 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5571 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5572
5573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5574 if (pImpl->pfnLockedU16)
5575 IEMOP_HLP_DONE_DECODING();
5576 else
5577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5578 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5579 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5580 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5581 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5582 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5583 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5584 IEM_MC_FETCH_EFLAGS(EFlags);
5585
5586 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5587 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5588 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5589 else
5590 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5591 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5592
5593 IEM_MC_COMMIT_EFLAGS(EFlags);
5594 IEM_MC_ADVANCE_RIP();
5595 IEM_MC_END();
5596 return VINF_SUCCESS;
5597
5598 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5599 }
5600 }
5601}
5602
5603
5604/** Opcode 0x0f 0xa3. */
5605FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5606{
5607 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5608 IEMOP_HLP_MIN_386();
5609 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5610}
5611
5612
5613/**
5614 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5615 */
5616FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5617{
5618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5619 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
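 /* SHLD/SHRD shift the destination operand while shifting bits in from
    the source operand; the count is passed through unmasked here, so the
    masking (mod 32, or mod 64 with REX.W) is presumably left to the
    assembly worker.  AF and OF are declared undefined above since the
    architecture leaves them undefined here (OF at least for counts
    other than 1). */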
5620
5621 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5622 {
5623 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5625
5626 switch (pVCpu->iem.s.enmEffOpSize)
5627 {
5628 case IEMMODE_16BIT:
5629 IEM_MC_BEGIN(4, 0);
5630 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5631 IEM_MC_ARG(uint16_t, u16Src, 1);
5632 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5633 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5634
5635 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5636 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5637 IEM_MC_REF_EFLAGS(pEFlags);
5638 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5639
5640 IEM_MC_ADVANCE_RIP();
5641 IEM_MC_END();
5642 return VINF_SUCCESS;
5643
5644 case IEMMODE_32BIT:
5645 IEM_MC_BEGIN(4, 0);
5646 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5647 IEM_MC_ARG(uint32_t, u32Src, 1);
5648 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5649 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5650
5651 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5652 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5653 IEM_MC_REF_EFLAGS(pEFlags);
5654 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5655
5656 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5657 IEM_MC_ADVANCE_RIP();
5658 IEM_MC_END();
5659 return VINF_SUCCESS;
5660
5661 case IEMMODE_64BIT:
5662 IEM_MC_BEGIN(4, 0);
5663 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5664 IEM_MC_ARG(uint64_t, u64Src, 1);
5665 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5666 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5667
5668 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5669 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5670 IEM_MC_REF_EFLAGS(pEFlags);
5671 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5672
5673 IEM_MC_ADVANCE_RIP();
5674 IEM_MC_END();
5675 return VINF_SUCCESS;
5676
5677 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5678 }
5679 }
5680 else
5681 {
5682 switch (pVCpu->iem.s.enmEffOpSize)
5683 {
5684 case IEMMODE_16BIT:
5685 IEM_MC_BEGIN(4, 2);
5686 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5687 IEM_MC_ARG(uint16_t, u16Src, 1);
5688 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5689 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5691
5692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5693 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5694 IEM_MC_ASSIGN(cShiftArg, cShift);
5695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5696 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5697 IEM_MC_FETCH_EFLAGS(EFlags);
5698 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5699 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5700
5701 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5702 IEM_MC_COMMIT_EFLAGS(EFlags);
5703 IEM_MC_ADVANCE_RIP();
5704 IEM_MC_END();
5705 return VINF_SUCCESS;
5706
5707 case IEMMODE_32BIT:
5708 IEM_MC_BEGIN(4, 2);
5709 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5710 IEM_MC_ARG(uint32_t, u32Src, 1);
5711 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5712 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5714
5715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5716 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5717 IEM_MC_ASSIGN(cShiftArg, cShift);
5718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5719 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5720 IEM_MC_FETCH_EFLAGS(EFlags);
5721 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5722 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5723
5724 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5725 IEM_MC_COMMIT_EFLAGS(EFlags);
5726 IEM_MC_ADVANCE_RIP();
5727 IEM_MC_END();
5728 return VINF_SUCCESS;
5729
5730 case IEMMODE_64BIT:
5731 IEM_MC_BEGIN(4, 2);
5732 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5733 IEM_MC_ARG(uint64_t, u64Src, 1);
5734 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5735 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5737
5738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5739 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5740 IEM_MC_ASSIGN(cShiftArg, cShift);
5741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5742 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5743 IEM_MC_FETCH_EFLAGS(EFlags);
5744 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5745 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5746
5747 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5748 IEM_MC_COMMIT_EFLAGS(EFlags);
5749 IEM_MC_ADVANCE_RIP();
5750 IEM_MC_END();
5751 return VINF_SUCCESS;
5752
5753 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5754 }
5755 }
5756}
5757
5758
5759/**
5760 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5761 */
5762FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5763{
5764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5765 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5766
5767 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5768 {
5769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5770
5771 switch (pVCpu->iem.s.enmEffOpSize)
5772 {
5773 case IEMMODE_16BIT:
5774 IEM_MC_BEGIN(4, 0);
5775 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5776 IEM_MC_ARG(uint16_t, u16Src, 1);
5777 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5778 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5779
5780 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5781 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5782 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5783 IEM_MC_REF_EFLAGS(pEFlags);
5784 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5785
5786 IEM_MC_ADVANCE_RIP();
5787 IEM_MC_END();
5788 return VINF_SUCCESS;
5789
5790 case IEMMODE_32BIT:
5791 IEM_MC_BEGIN(4, 0);
5792 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5793 IEM_MC_ARG(uint32_t, u32Src, 1);
5794 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5795 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5796
5797 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5798 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5799 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5800 IEM_MC_REF_EFLAGS(pEFlags);
5801 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5802
5803 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5804 IEM_MC_ADVANCE_RIP();
5805 IEM_MC_END();
5806 return VINF_SUCCESS;
5807
5808 case IEMMODE_64BIT:
5809 IEM_MC_BEGIN(4, 0);
5810 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5811 IEM_MC_ARG(uint64_t, u64Src, 1);
5812 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5813 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5814
5815 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5816 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5817 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5818 IEM_MC_REF_EFLAGS(pEFlags);
5819 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5820
5821 IEM_MC_ADVANCE_RIP();
5822 IEM_MC_END();
5823 return VINF_SUCCESS;
5824
5825 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5826 }
5827 }
5828 else
5829 {
5830 switch (pVCpu->iem.s.enmEffOpSize)
5831 {
5832 case IEMMODE_16BIT:
5833 IEM_MC_BEGIN(4, 2);
5834 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5835 IEM_MC_ARG(uint16_t, u16Src, 1);
5836 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5837 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5838 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5839
5840 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5842 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5843 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5844 IEM_MC_FETCH_EFLAGS(EFlags);
5845 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5846 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5847
5848 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5849 IEM_MC_COMMIT_EFLAGS(EFlags);
5850 IEM_MC_ADVANCE_RIP();
5851 IEM_MC_END();
5852 return VINF_SUCCESS;
5853
5854 case IEMMODE_32BIT:
5855 IEM_MC_BEGIN(4, 2);
5856 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5857 IEM_MC_ARG(uint32_t, u32Src, 1);
5858 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5859 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5861
5862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5864 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5865 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5866 IEM_MC_FETCH_EFLAGS(EFlags);
5867 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5868 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5869
5870 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5871 IEM_MC_COMMIT_EFLAGS(EFlags);
5872 IEM_MC_ADVANCE_RIP();
5873 IEM_MC_END();
5874 return VINF_SUCCESS;
5875
5876 case IEMMODE_64BIT:
5877 IEM_MC_BEGIN(4, 2);
5878 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5879 IEM_MC_ARG(uint64_t, u64Src, 1);
5880 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5881 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5882 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5883
5884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5886 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5887 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5888 IEM_MC_FETCH_EFLAGS(EFlags);
5889 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5890 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5891
5892 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5893 IEM_MC_COMMIT_EFLAGS(EFlags);
5894 IEM_MC_ADVANCE_RIP();
5895 IEM_MC_END();
5896 return VINF_SUCCESS;
5897
5898 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5899 }
5900 }
5901}
5902
5903
5904
5905/** Opcode 0x0f 0xa4. */
5906FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5907{
5908 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5909 IEMOP_HLP_MIN_386();
5910 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5911}
5912
5913
5914/** Opcode 0x0f 0xa5. */
5915FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5916{
5917 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5918 IEMOP_HLP_MIN_386();
5919 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5920}
5921
5922
5923/** Opcode 0x0f 0xa8. */
5924FNIEMOP_DEF(iemOp_push_gs)
5925{
5926 IEMOP_MNEMONIC(push_gs, "push gs");
5927 IEMOP_HLP_MIN_386();
5928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5929 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5930}
5931
5932
5933/** Opcode 0x0f 0xa9. */
5934FNIEMOP_DEF(iemOp_pop_gs)
5935{
5936 IEMOP_MNEMONIC(pop_gs, "pop gs");
5937 IEMOP_HLP_MIN_386();
5938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5939 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5940}
5941
5942
5943/** Opcode 0x0f 0xaa. */
5944FNIEMOP_STUB(iemOp_rsm);
5945//IEMOP_HLP_MIN_386();
5946
5947
5948/** Opcode 0x0f 0xab. */
5949FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5950{
5951 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5952 IEMOP_HLP_MIN_386();
5953 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5954}
5955
5956
5957/** Opcode 0x0f 0xac. */
5958FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5959{
5960 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5961 IEMOP_HLP_MIN_386();
5962 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5963}
5964
5965
5966/** Opcode 0x0f 0xad. */
5967FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5968{
5969 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5970 IEMOP_HLP_MIN_386();
5971 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5972}
5973
5974
5975/** Opcode 0x0f 0xae mem/0. */
5976FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5977{
5978 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5979 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5980 return IEMOP_RAISE_INVALID_OPCODE();
5981
5982 IEM_MC_BEGIN(3, 1);
5983 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5984 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5985 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5988 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5989 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5990 IEM_MC_END();
5991 return VINF_SUCCESS;
5992}
5993
5994
5995/** Opcode 0x0f 0xae mem/1. */
5996FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5997{
5998 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5999 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6000 return IEMOP_RAISE_INVALID_OPCODE();
6001
6002 IEM_MC_BEGIN(3, 1);
6003 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6004 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6005 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6008 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6009 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6010 IEM_MC_END();
6011 return VINF_SUCCESS;
6012}
6013
6014
6015/** Opcode 0x0f 0xae mem/2. */
6016FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
6017
6018/** Opcode 0x0f 0xae mem/3. */
6019FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
6020
6021/** Opcode 0x0f 0xae mem/4. */
6022FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
6023
6024/** Opcode 0x0f 0xae mem/5. */
6025FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
6026
6027/** Opcode 0x0f 0xae mem/6. */
6028FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6029
6030/** Opcode 0x0f 0xae mem/7. */
6031FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
6032
6033
6034/** Opcode 0x0f 0xae 11b/5. */
6035FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6036{
6037 RT_NOREF_PV(bRm);
6038 IEMOP_MNEMONIC(lfence, "lfence");
6039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6040 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6041 return IEMOP_RAISE_INVALID_OPCODE();
6042
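 /* Use the real LFENCE when the host has SSE2; otherwise fall back to an
    alternative fence, presumably a LOCKed memory operation acting as a
    full barrier.  The mfence and sfence handlers below follow the same
    pattern. */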
6043 IEM_MC_BEGIN(0, 0);
6044 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6045 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6046 else
6047 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6048 IEM_MC_ADVANCE_RIP();
6049 IEM_MC_END();
6050 return VINF_SUCCESS;
6051}
6052
6053
6054/** Opcode 0x0f 0xae 11b/6. */
6055FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6056{
6057 RT_NOREF_PV(bRm);
6058 IEMOP_MNEMONIC(mfence, "mfence");
6059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6060 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6061 return IEMOP_RAISE_INVALID_OPCODE();
6062
6063 IEM_MC_BEGIN(0, 0);
6064 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6065 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6066 else
6067 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6068 IEM_MC_ADVANCE_RIP();
6069 IEM_MC_END();
6070 return VINF_SUCCESS;
6071}
6072
6073
6074/** Opcode 0x0f 0xae 11b/7. */
6075FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6076{
6077 RT_NOREF_PV(bRm);
6078 IEMOP_MNEMONIC(sfence, "sfence");
6079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6080 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6081 return IEMOP_RAISE_INVALID_OPCODE();
6082
6083 IEM_MC_BEGIN(0, 0);
6084 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6085 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6086 else
6087 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6088 IEM_MC_ADVANCE_RIP();
6089 IEM_MC_END();
6090 return VINF_SUCCESS;
6091}
6092
6093
6094/** Opcode 0xf3 0x0f 0xae 11b/0. */
6095FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6096
6097/** Opcode 0xf3 0x0f 0xae 11b/1. */
6098FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6099
6100/** Opcode 0xf3 0x0f 0xae 11b/2. */
6101FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6102
6103/** Opcode 0xf3 0x0f 0xae 11b/3. */
6104FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6105
6106
6107/** Opcode 0x0f 0xae. */
6108FNIEMOP_DEF(iemOp_Grp15)
6109{
6110 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
6111 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6112 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6113 {
6114 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6115 {
6116 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
6117 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
6118 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
6119 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
6120 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
6121 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
6122 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
6123 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
6124 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6125 }
6126 }
6127 else
6128 {
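 /* mod=3: the register forms are further qualified by mandatory
    prefixes.  No prefix selects the fences (reg 5..7), F3 selects
    RDFSBASE/RDGSBASE/WRFSBASE/WRGSBASE (reg 0..3), and every other
    combination is #UD. */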
6129 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
6130 {
6131 case 0:
6132 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6133 {
6134 case 0: return IEMOP_RAISE_INVALID_OPCODE();
6135 case 1: return IEMOP_RAISE_INVALID_OPCODE();
6136 case 2: return IEMOP_RAISE_INVALID_OPCODE();
6137 case 3: return IEMOP_RAISE_INVALID_OPCODE();
6138 case 4: return IEMOP_RAISE_INVALID_OPCODE();
6139 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
6140 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
6141 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
6142 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6143 }
6144 break;
6145
6146 case IEM_OP_PRF_REPZ:
6147 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6148 {
6149 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
6150 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
6151 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
6152 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
6153 case 4: return IEMOP_RAISE_INVALID_OPCODE();
6154 case 5: return IEMOP_RAISE_INVALID_OPCODE();
6155 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6156 case 7: return IEMOP_RAISE_INVALID_OPCODE();
6157 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6158 }
6159 break;
6160
6161 default:
6162 return IEMOP_RAISE_INVALID_OPCODE();
6163 }
6164 }
6165}
6166
6167
6168/** Opcode 0x0f 0xaf. */
6169FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6170{
6171 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6172 IEMOP_HLP_MIN_386();
6173 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6174 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6175}
6176
6177
6178/** Opcode 0x0f 0xb0. */
6179FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6180{
6181 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6182 IEMOP_HLP_MIN_486();
6183 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6184
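 /* CMPXCHG compares AL with the destination: if they are equal, ZF is
    set and the source is stored to the destination; otherwise ZF is
    cleared and the destination value is loaded into AL.  The worker
    updates the value behind the AL reference accordingly, which is why
    the memory variant below copies its local AL back unconditionally. */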
6185 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6186 {
6187 IEMOP_HLP_DONE_DECODING();
6188 IEM_MC_BEGIN(4, 0);
6189 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6190 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6191 IEM_MC_ARG(uint8_t, u8Src, 2);
6192 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6193
6194 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6195 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6196 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6197 IEM_MC_REF_EFLAGS(pEFlags);
6198 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6199 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6200 else
6201 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6202
6203 IEM_MC_ADVANCE_RIP();
6204 IEM_MC_END();
6205 }
6206 else
6207 {
6208 IEM_MC_BEGIN(4, 3);
6209 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6210 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6211 IEM_MC_ARG(uint8_t, u8Src, 2);
6212 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6214 IEM_MC_LOCAL(uint8_t, u8Al);
6215
6216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6217 IEMOP_HLP_DONE_DECODING();
6218 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6219 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6220 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6221 IEM_MC_FETCH_EFLAGS(EFlags);
6222 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6223 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6224 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6225 else
6226 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6227
6228 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6229 IEM_MC_COMMIT_EFLAGS(EFlags);
6230 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6231 IEM_MC_ADVANCE_RIP();
6232 IEM_MC_END();
6233 }
6234 return VINF_SUCCESS;
6235}
6236
6237/** Opcode 0x0f 0xb1. */
6238FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6239{
6240 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6241 IEMOP_HLP_MIN_486();
6242 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6243
6244 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6245 {
6246 IEMOP_HLP_DONE_DECODING();
6247 switch (pVCpu->iem.s.enmEffOpSize)
6248 {
6249 case IEMMODE_16BIT:
6250 IEM_MC_BEGIN(4, 0);
6251 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6252 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6253 IEM_MC_ARG(uint16_t, u16Src, 2);
6254 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6255
6256 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6257 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6258 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6259 IEM_MC_REF_EFLAGS(pEFlags);
6260 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6261 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6262 else
6263 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6264
6265 IEM_MC_ADVANCE_RIP();
6266 IEM_MC_END();
6267 return VINF_SUCCESS;
6268
6269 case IEMMODE_32BIT:
6270 IEM_MC_BEGIN(4, 0);
6271 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6272 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6273 IEM_MC_ARG(uint32_t, u32Src, 2);
6274 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6275
6276 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6277 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6278 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
6279 IEM_MC_REF_EFLAGS(pEFlags);
6280 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6281 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6282 else
6283 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6284
6285 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
6286 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6287 IEM_MC_ADVANCE_RIP();
6288 IEM_MC_END();
6289 return VINF_SUCCESS;
6290
6291 case IEMMODE_64BIT:
6292 IEM_MC_BEGIN(4, 0);
6293 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6294 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
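/* On 32-bit hosts the 64-bit source operand is passed to the assembly
   worker by reference rather than by value, presumably because a 64-bit
   by-value argument is awkward for the 32-bit calling convention. */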
6295#ifdef RT_ARCH_X86
6296 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6297#else
6298 IEM_MC_ARG(uint64_t, u64Src, 2);
6299#endif
6300 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6301
6302 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6303 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
6304 IEM_MC_REF_EFLAGS(pEFlags);
6305#ifdef RT_ARCH_X86
6306 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6307 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6308 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6309 else
6310 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6311#else
6312 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6313 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6314 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6315 else
6316 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6317#endif
6318
6319 IEM_MC_ADVANCE_RIP();
6320 IEM_MC_END();
6321 return VINF_SUCCESS;
6322
6323 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6324 }
6325 }
6326 else
6327 {
6328 switch (pVCpu->iem.s.enmEffOpSize)
6329 {
6330 case IEMMODE_16BIT:
6331 IEM_MC_BEGIN(4, 3);
6332 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6333 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6334 IEM_MC_ARG(uint16_t, u16Src, 2);
6335 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6336 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6337 IEM_MC_LOCAL(uint16_t, u16Ax);
6338
6339 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6340 IEMOP_HLP_DONE_DECODING();
6341 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6342 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6343 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
6344 IEM_MC_FETCH_EFLAGS(EFlags);
6345 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
6346 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6347 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6348 else
6349 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6350
6351 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6352 IEM_MC_COMMIT_EFLAGS(EFlags);
6353 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
6354 IEM_MC_ADVANCE_RIP();
6355 IEM_MC_END();
6356 return VINF_SUCCESS;
6357
6358 case IEMMODE_32BIT:
6359 IEM_MC_BEGIN(4, 3);
6360 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6361 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
6362 IEM_MC_ARG(uint32_t, u32Src, 2);
6363 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6364 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6365 IEM_MC_LOCAL(uint32_t, u32Eax);
6366
6367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6368 IEMOP_HLP_DONE_DECODING();
6369 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6370 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6371 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
6372 IEM_MC_FETCH_EFLAGS(EFlags);
6373 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
6374 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6375 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
6376 else
6377 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
6378
6379 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6380 IEM_MC_COMMIT_EFLAGS(EFlags);
6381 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
6382 IEM_MC_ADVANCE_RIP();
6383 IEM_MC_END();
6384 return VINF_SUCCESS;
6385
6386 case IEMMODE_64BIT:
6387 IEM_MC_BEGIN(4, 3);
6388 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6389 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
6390#ifdef RT_ARCH_X86
6391 IEM_MC_ARG(uint64_t *, pu64Src, 2);
6392#else
6393 IEM_MC_ARG(uint64_t, u64Src, 2);
6394#endif
6395 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6397 IEM_MC_LOCAL(uint64_t, u64Rax);
6398
6399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6400 IEMOP_HLP_DONE_DECODING();
6401 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6402 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
6403 IEM_MC_FETCH_EFLAGS(EFlags);
6404 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
6405#ifdef RT_ARCH_X86
6406 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6407 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6408 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
6409 else
6410 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
6411#else
6412 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6413 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6414 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
6415 else
6416 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
6417#endif
6418
6419 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6420 IEM_MC_COMMIT_EFLAGS(EFlags);
6421 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
6422 IEM_MC_ADVANCE_RIP();
6423 IEM_MC_END();
6424 return VINF_SUCCESS;
6425
6426 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6427 }
6428 }
6429}
6430
6431
6432FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
6433{
6434 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
6435 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
6436
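 /* The Mp operand is a far pointer in memory: the offset comes first
    (2, 4 or 8 bytes depending on the effective operand size), followed
    by the 16-bit selector, which is why the selector fetches below use
    a displacement equal to the offset size. */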
6437 switch (pVCpu->iem.s.enmEffOpSize)
6438 {
6439 case IEMMODE_16BIT:
6440 IEM_MC_BEGIN(5, 1);
6441 IEM_MC_ARG(uint16_t, uSel, 0);
6442 IEM_MC_ARG(uint16_t, offSeg, 1);
6443 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6444 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6445 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6446 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6449 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6450 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
6451 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6452 IEM_MC_END();
6453 return VINF_SUCCESS;
6454
6455 case IEMMODE_32BIT:
6456 IEM_MC_BEGIN(5, 1);
6457 IEM_MC_ARG(uint16_t, uSel, 0);
6458 IEM_MC_ARG(uint32_t, offSeg, 1);
6459 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6460 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6461 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6462 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6463 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6465 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6466 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
6467 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6468 IEM_MC_END();
6469 return VINF_SUCCESS;
6470
6471 case IEMMODE_64BIT:
6472 IEM_MC_BEGIN(5, 1);
6473 IEM_MC_ARG(uint16_t, uSel, 0);
6474 IEM_MC_ARG(uint64_t, offSeg, 1);
6475 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
6476 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
6477 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
6478 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
6479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6481 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
6482 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6483 else
6484 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
6485 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
6486 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
6487 IEM_MC_END();
6488 return VINF_SUCCESS;
6489
6490 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6491 }
6492}
6493
6494
6495/** Opcode 0x0f 0xb2. */
6496FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6497{
6498 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6499 IEMOP_HLP_MIN_386();
6500 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6501 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6502 return IEMOP_RAISE_INVALID_OPCODE();
6503 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6504}
6505
6506
6507/** Opcode 0x0f 0xb3. */
6508FNIEMOP_DEF(iemOp_btr_Ev_Gv)
6509{
6510 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
6511 IEMOP_HLP_MIN_386();
6512 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
6513}
6514
6515
6516/** Opcode 0x0f 0xb4. */
6517FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6518{
6519 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6520 IEMOP_HLP_MIN_386();
6521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6522 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6523 return IEMOP_RAISE_INVALID_OPCODE();
6524 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6525}
6526
6527
6528/** Opcode 0x0f 0xb5. */
6529FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6530{
6531 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6532 IEMOP_HLP_MIN_386();
6533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6534 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6535 return IEMOP_RAISE_INVALID_OPCODE();
6536 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6537}
6538
6539
6540/** Opcode 0x0f 0xb6. */
6541FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
6542{
6543 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
6544 IEMOP_HLP_MIN_386();
6545
6546 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6547
6548 /*
6549 * If rm is denoting a register, no more instruction bytes.
6550 */
6551 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6552 {
6553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6554 switch (pVCpu->iem.s.enmEffOpSize)
6555 {
6556 case IEMMODE_16BIT:
6557 IEM_MC_BEGIN(0, 1);
6558 IEM_MC_LOCAL(uint16_t, u16Value);
6559 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6560 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6561 IEM_MC_ADVANCE_RIP();
6562 IEM_MC_END();
6563 return VINF_SUCCESS;
6564
6565 case IEMMODE_32BIT:
6566 IEM_MC_BEGIN(0, 1);
6567 IEM_MC_LOCAL(uint32_t, u32Value);
6568 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6569 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6570 IEM_MC_ADVANCE_RIP();
6571 IEM_MC_END();
6572 return VINF_SUCCESS;
6573
6574 case IEMMODE_64BIT:
6575 IEM_MC_BEGIN(0, 1);
6576 IEM_MC_LOCAL(uint64_t, u64Value);
6577 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6578 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6579 IEM_MC_ADVANCE_RIP();
6580 IEM_MC_END();
6581 return VINF_SUCCESS;
6582
6583 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6584 }
6585 }
6586 else
6587 {
6588 /*
6589 * We're loading a register from memory.
6590 */
6591 switch (pVCpu->iem.s.enmEffOpSize)
6592 {
6593 case IEMMODE_16BIT:
6594 IEM_MC_BEGIN(0, 2);
6595 IEM_MC_LOCAL(uint16_t, u16Value);
6596 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6597 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6599 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6600 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6601 IEM_MC_ADVANCE_RIP();
6602 IEM_MC_END();
6603 return VINF_SUCCESS;
6604
6605 case IEMMODE_32BIT:
6606 IEM_MC_BEGIN(0, 2);
6607 IEM_MC_LOCAL(uint32_t, u32Value);
6608 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6609 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6611 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6612 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6613 IEM_MC_ADVANCE_RIP();
6614 IEM_MC_END();
6615 return VINF_SUCCESS;
6616
6617 case IEMMODE_64BIT:
6618 IEM_MC_BEGIN(0, 2);
6619 IEM_MC_LOCAL(uint64_t, u64Value);
6620 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6621 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6623 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6624 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6625 IEM_MC_ADVANCE_RIP();
6626 IEM_MC_END();
6627 return VINF_SUCCESS;
6628
6629 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6630 }
6631 }
6632}
6633
6634
6635/** Opcode 0x0f 0xb7. */
6636FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6637{
6638 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6639 IEMOP_HLP_MIN_386();
6640
6641 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6642
6643 /** @todo Not entirely sure how the operand size prefix is handled here,
6644 * assuming that it will be ignored. Would be nice to have a few
6645 * test for this. */
6646 /*
6647 * If rm is denoting a register, no more instruction bytes.
6648 */
6649 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6650 {
6651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6652 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6653 {
6654 IEM_MC_BEGIN(0, 1);
6655 IEM_MC_LOCAL(uint32_t, u32Value);
6656 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6657 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6658 IEM_MC_ADVANCE_RIP();
6659 IEM_MC_END();
6660 }
6661 else
6662 {
6663 IEM_MC_BEGIN(0, 1);
6664 IEM_MC_LOCAL(uint64_t, u64Value);
6665 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6666 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6667 IEM_MC_ADVANCE_RIP();
6668 IEM_MC_END();
6669 }
6670 }
6671 else
6672 {
6673 /*
6674 * We're loading a register from memory.
6675 */
6676 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6677 {
6678 IEM_MC_BEGIN(0, 2);
6679 IEM_MC_LOCAL(uint32_t, u32Value);
6680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6681 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6683 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6684 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6685 IEM_MC_ADVANCE_RIP();
6686 IEM_MC_END();
6687 }
6688 else
6689 {
6690 IEM_MC_BEGIN(0, 2);
6691 IEM_MC_LOCAL(uint64_t, u64Value);
6692 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6695 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6696 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6697 IEM_MC_ADVANCE_RIP();
6698 IEM_MC_END();
6699 }
6700 }
6701 return VINF_SUCCESS;
6702}
6703
6704
6705/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
6706FNIEMOP_UD_STUB(iemOp_jmpe);
6707/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
6708FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6709
6710
6711/** Opcode 0x0f 0xb9. */
6712FNIEMOP_DEF(iemOp_Grp10)
6713{
6714 Log(("iemOp_Grp10 -> #UD\n"));
6715 return IEMOP_RAISE_INVALID_OPCODE();
6716}
6717
6718
6719/** Opcode 0x0f 0xba. */
6720FNIEMOP_DEF(iemOp_Grp8)
6721{
6722 IEMOP_HLP_MIN_386();
6723 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6724 PCIEMOPBINSIZES pImpl;
6725 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6726 {
6727 case 0: case 1: case 2: case 3:
6728 return IEMOP_RAISE_INVALID_OPCODE();
6729 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6730 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6731 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6732 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6733 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6734 }
6735 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6736
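 /* Unlike the Ev,Gv bit instructions, the immediate bit offset is simply
    masked down to the operand width here; it cannot go negative and the
    memory forms therefore need no effective-address adjustment. */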
6737 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6738 {
6739 /* register destination. */
6740 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6742
6743 switch (pVCpu->iem.s.enmEffOpSize)
6744 {
6745 case IEMMODE_16BIT:
6746 IEM_MC_BEGIN(3, 0);
6747 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6748 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6749 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6750
6751 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6752 IEM_MC_REF_EFLAGS(pEFlags);
6753 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6754
6755 IEM_MC_ADVANCE_RIP();
6756 IEM_MC_END();
6757 return VINF_SUCCESS;
6758
6759 case IEMMODE_32BIT:
6760 IEM_MC_BEGIN(3, 0);
6761 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6762 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6763 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6764
6765 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6766 IEM_MC_REF_EFLAGS(pEFlags);
6767 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6768
6769 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6770 IEM_MC_ADVANCE_RIP();
6771 IEM_MC_END();
6772 return VINF_SUCCESS;
6773
6774 case IEMMODE_64BIT:
6775 IEM_MC_BEGIN(3, 0);
6776 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6777 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6778 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6779
6780 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6781 IEM_MC_REF_EFLAGS(pEFlags);
6782 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6783
6784 IEM_MC_ADVANCE_RIP();
6785 IEM_MC_END();
6786 return VINF_SUCCESS;
6787
6788 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6789 }
6790 }
6791 else
6792 {
6793 /* memory destination. */
6794
6795 uint32_t fAccess;
6796 if (pImpl->pfnLockedU16)
6797 fAccess = IEM_ACCESS_DATA_RW;
6798 else /* BT */
6799 fAccess = IEM_ACCESS_DATA_R;
6800
6801 /* Note: unlike the Ev,Gv forms above, no negative bit offsets are possible here. */
6802 switch (pVCpu->iem.s.enmEffOpSize)
6803 {
6804 case IEMMODE_16BIT:
6805 IEM_MC_BEGIN(3, 1);
6806 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6807 IEM_MC_ARG(uint16_t, u16Src, 1);
6808 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6810
6811 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6812 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6813 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6814 if (pImpl->pfnLockedU16)
6815 IEMOP_HLP_DONE_DECODING();
6816 else
6817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6818 IEM_MC_FETCH_EFLAGS(EFlags);
6819 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6820 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6821 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6822 else
6823 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6824 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6825
6826 IEM_MC_COMMIT_EFLAGS(EFlags);
6827 IEM_MC_ADVANCE_RIP();
6828 IEM_MC_END();
6829 return VINF_SUCCESS;
6830
6831 case IEMMODE_32BIT:
6832 IEM_MC_BEGIN(3, 1);
6833 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6834 IEM_MC_ARG(uint32_t, u32Src, 1);
6835 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6836 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6837
6838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6839 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6840 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6841 if (pImpl->pfnLockedU16)
6842 IEMOP_HLP_DONE_DECODING();
6843 else
6844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6845 IEM_MC_FETCH_EFLAGS(EFlags);
6846 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6847 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6848 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6849 else
6850 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6851 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6852
6853 IEM_MC_COMMIT_EFLAGS(EFlags);
6854 IEM_MC_ADVANCE_RIP();
6855 IEM_MC_END();
6856 return VINF_SUCCESS;
6857
6858 case IEMMODE_64BIT:
6859 IEM_MC_BEGIN(3, 1);
6860 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6861 IEM_MC_ARG(uint64_t, u64Src, 1);
6862 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6864
6865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6866 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6867 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6868 if (pImpl->pfnLockedU16)
6869 IEMOP_HLP_DONE_DECODING();
6870 else
6871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6872 IEM_MC_FETCH_EFLAGS(EFlags);
6873 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6874 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6875 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6876 else
6877 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6878 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6879
6880 IEM_MC_COMMIT_EFLAGS(EFlags);
6881 IEM_MC_ADVANCE_RIP();
6882 IEM_MC_END();
6883 return VINF_SUCCESS;
6884
6885 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6886 }
6887 }
6888
6889}
6890
6891
6892/** Opcode 0x0f 0xbb. */
6893FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6894{
6895 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6896 IEMOP_HLP_MIN_386();
6897 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6898}
6899
6900
6901/** Opcode 0x0f 0xbc. */
6902FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6903{
6904 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6905 IEMOP_HLP_MIN_386();
6906 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
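 /* ZF is the only flag BSF defines: it is set when the source operand is
    zero, in which case Intel leaves the destination undefined while AMD
    documents it as unchanged.  The bsr handler below declares the same
    set of undefined flags. */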
6907 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6908}
6909
6910
6911/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
6912FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6913
6914
6915/** Opcode 0x0f 0xbd. */
6916FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6917{
6918 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6919 IEMOP_HLP_MIN_386();
6920 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6921 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6922}
6923
6924
6925/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
6926FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6927
6928
6929/** Opcode 0x0f 0xbe. */
6930FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6931{
6932 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6933 IEMOP_HLP_MIN_386();
6934
6935 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6936
6937 /*
6938 * If rm is denoting a register, no more instruction bytes.
6939 */
6940 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6941 {
6942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6943 switch (pVCpu->iem.s.enmEffOpSize)
6944 {
6945 case IEMMODE_16BIT:
6946 IEM_MC_BEGIN(0, 1);
6947 IEM_MC_LOCAL(uint16_t, u16Value);
6948 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6949 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6950 IEM_MC_ADVANCE_RIP();
6951 IEM_MC_END();
6952 return VINF_SUCCESS;
6953
6954 case IEMMODE_32BIT:
6955 IEM_MC_BEGIN(0, 1);
6956 IEM_MC_LOCAL(uint32_t, u32Value);
6957 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6958 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6959 IEM_MC_ADVANCE_RIP();
6960 IEM_MC_END();
6961 return VINF_SUCCESS;
6962
6963 case IEMMODE_64BIT:
6964 IEM_MC_BEGIN(0, 1);
6965 IEM_MC_LOCAL(uint64_t, u64Value);
6966 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6967 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6968 IEM_MC_ADVANCE_RIP();
6969 IEM_MC_END();
6970 return VINF_SUCCESS;
6971
6972 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6973 }
6974 }
6975 else
6976 {
6977 /*
6978 * We're loading a register from memory.
6979 */
6980 switch (pVCpu->iem.s.enmEffOpSize)
6981 {
6982 case IEMMODE_16BIT:
6983 IEM_MC_BEGIN(0, 2);
6984 IEM_MC_LOCAL(uint16_t, u16Value);
6985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6988 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6989 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6990 IEM_MC_ADVANCE_RIP();
6991 IEM_MC_END();
6992 return VINF_SUCCESS;
6993
6994 case IEMMODE_32BIT:
6995 IEM_MC_BEGIN(0, 2);
6996 IEM_MC_LOCAL(uint32_t, u32Value);
6997 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6998 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7000 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7001 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7002 IEM_MC_ADVANCE_RIP();
7003 IEM_MC_END();
7004 return VINF_SUCCESS;
7005
7006 case IEMMODE_64BIT:
7007 IEM_MC_BEGIN(0, 2);
7008 IEM_MC_LOCAL(uint64_t, u64Value);
7009 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7010 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7012 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7013 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7014 IEM_MC_ADVANCE_RIP();
7015 IEM_MC_END();
7016 return VINF_SUCCESS;
7017
7018 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7019 }
7020 }
7021}
7022
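/* Illustrative sketch (not decoder code; helper name invented): the plain C
   equivalent of the fetch-with-sign-extend microcode used above, assuming
   the usual two's complement widening casts. */
#if 0
static uint32_t iemMovSxU8ToU32Sketch(uint8_t u8Src)
{
    return (uint32_t)(int32_t)(int8_t)u8Src; /* e.g. 0x80 becomes 0xffffff80 */
}
#endif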
7023
7024/** Opcode 0x0f 0xbf. */
7025FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7026{
7027 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7028 IEMOP_HLP_MIN_386();
7029
7030 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7031
7032 /** @todo Not entirely sure how the operand size prefix is handled here,
7033 * assuming that it will be ignored. Would be nice to have a few
7034 * tests for this. */
7035 /*
7036 * If rm is denoting a register, no more instruction bytes.
7037 */
7038 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7039 {
7040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7041 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7042 {
7043 IEM_MC_BEGIN(0, 1);
7044 IEM_MC_LOCAL(uint32_t, u32Value);
7045 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7046 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7047 IEM_MC_ADVANCE_RIP();
7048 IEM_MC_END();
7049 }
7050 else
7051 {
7052 IEM_MC_BEGIN(0, 1);
7053 IEM_MC_LOCAL(uint64_t, u64Value);
7054 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7055 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7056 IEM_MC_ADVANCE_RIP();
7057 IEM_MC_END();
7058 }
7059 }
7060 else
7061 {
7062 /*
7063 * We're loading a register from memory.
7064 */
7065 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7066 {
7067 IEM_MC_BEGIN(0, 2);
7068 IEM_MC_LOCAL(uint32_t, u32Value);
7069 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7072 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7073 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7074 IEM_MC_ADVANCE_RIP();
7075 IEM_MC_END();
7076 }
7077 else
7078 {
7079 IEM_MC_BEGIN(0, 2);
7080 IEM_MC_LOCAL(uint64_t, u64Value);
7081 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7084 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7085 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7086 IEM_MC_ADVANCE_RIP();
7087 IEM_MC_END();
7088 }
7089 }
7090 return VINF_SUCCESS;
7091}
7092
7093
7094/** Opcode 0x0f 0xc0. */
7095FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7096{
7097 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7098 IEMOP_HLP_MIN_486();
7099 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7100
7101 /*
7102 * If rm is denoting a register, no more instruction bytes.
7103 */
7104 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7105 {
7106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7107
7108 IEM_MC_BEGIN(3, 0);
7109 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7110 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7111 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7112
7113 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7114 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7115 IEM_MC_REF_EFLAGS(pEFlags);
7116 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7117
7118 IEM_MC_ADVANCE_RIP();
7119 IEM_MC_END();
7120 }
7121 else
7122 {
7123 /*
7124 * We're accessing memory.
7125 */
7126 IEM_MC_BEGIN(3, 3);
7127 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7128 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7129 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7130 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7132
7133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7134 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7135 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7136 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7137 IEM_MC_FETCH_EFLAGS(EFlags);
7138 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7139 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7140 else
7141 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7142
7143 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7144 IEM_MC_COMMIT_EFLAGS(EFlags);
7145 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7146 IEM_MC_ADVANCE_RIP();
7147 IEM_MC_END();
7148 return VINF_SUCCESS;
7149 }
7150 return VINF_SUCCESS;
7151}
7152
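/* Illustrative sketch (not decoder code; helper name invented): the
   exchange-and-add the iemAImpl_xadd_u8 worker is assumed to perform,
   leaving out the EFLAGS update (done as for ADD in the real worker). */
#if 0
static void iemXAddU8Sketch(uint8_t *pu8Dst, uint8_t *pu8Reg)
{
    uint8_t const u8OldDst = *pu8Dst;
    *pu8Dst = (uint8_t)(u8OldDst + *pu8Reg); /* the sum goes to the destination, */
    *pu8Reg = u8OldDst;                      /* the old destination value to the register. */
}
#endif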
7153
7154/** Opcode 0x0f 0xc1. */
7155FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7156{
7157 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7158 IEMOP_HLP_MIN_486();
7159 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7160
7161 /*
7162 * If rm is denoting a register, no more instruction bytes.
7163 */
7164 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7165 {
7166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7167
7168 switch (pVCpu->iem.s.enmEffOpSize)
7169 {
7170 case IEMMODE_16BIT:
7171 IEM_MC_BEGIN(3, 0);
7172 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7173 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7174 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7175
7176 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7177 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7178 IEM_MC_REF_EFLAGS(pEFlags);
7179 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7180
7181 IEM_MC_ADVANCE_RIP();
7182 IEM_MC_END();
7183 return VINF_SUCCESS;
7184
7185 case IEMMODE_32BIT:
7186 IEM_MC_BEGIN(3, 0);
7187 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7188 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7189 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7190
7191 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7192 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7193 IEM_MC_REF_EFLAGS(pEFlags);
7194 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7195
7196 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7197 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7198 IEM_MC_ADVANCE_RIP();
7199 IEM_MC_END();
7200 return VINF_SUCCESS;
7201
7202 case IEMMODE_64BIT:
7203 IEM_MC_BEGIN(3, 0);
7204 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7205 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7206 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7207
7208 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7209 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7210 IEM_MC_REF_EFLAGS(pEFlags);
7211 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7212
7213 IEM_MC_ADVANCE_RIP();
7214 IEM_MC_END();
7215 return VINF_SUCCESS;
7216
7217 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7218 }
7219 }
7220 else
7221 {
7222 /*
7223 * We're accessing memory.
7224 */
7225 switch (pVCpu->iem.s.enmEffOpSize)
7226 {
7227 case IEMMODE_16BIT:
7228 IEM_MC_BEGIN(3, 3);
7229 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7230 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7231 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7232 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7233 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7234
7235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7236 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7237 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7238 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7239 IEM_MC_FETCH_EFLAGS(EFlags);
7240 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7241 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7242 else
7243 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7244
7245 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7246 IEM_MC_COMMIT_EFLAGS(EFlags);
7247 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7248 IEM_MC_ADVANCE_RIP();
7249 IEM_MC_END();
7250 return VINF_SUCCESS;
7251
7252 case IEMMODE_32BIT:
7253 IEM_MC_BEGIN(3, 3);
7254 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7255 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7256 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7257 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7259
7260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7261 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7262 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7263 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7264 IEM_MC_FETCH_EFLAGS(EFlags);
7265 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7266 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7267 else
7268 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7269
7270 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7271 IEM_MC_COMMIT_EFLAGS(EFlags);
7272 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7273 IEM_MC_ADVANCE_RIP();
7274 IEM_MC_END();
7275 return VINF_SUCCESS;
7276
7277 case IEMMODE_64BIT:
7278 IEM_MC_BEGIN(3, 3);
7279 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7280 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7281 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7282 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7283 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7284
7285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7286 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7287 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7288 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7289 IEM_MC_FETCH_EFLAGS(EFlags);
7290 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7291 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7292 else
7293 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7294
7295 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7296 IEM_MC_COMMIT_EFLAGS(EFlags);
7297 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7298 IEM_MC_ADVANCE_RIP();
7299 IEM_MC_END();
7300 return VINF_SUCCESS;
7301
7302 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7303 }
7304 }
7305}
7306
7307
7308/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
7309FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
7310/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
7311FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
7312/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
7313FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
7314/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
7315FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7316
7317
7318/** Opcode 0x0f 0xc3. */
7319FNIEMOP_DEF(iemOp_movnti_My_Gy)
7320{
7321 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7322
7323 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7324
7325 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7326 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7327 {
7328 switch (pVCpu->iem.s.enmEffOpSize)
7329 {
7330 case IEMMODE_32BIT:
7331 IEM_MC_BEGIN(0, 2);
7332 IEM_MC_LOCAL(uint32_t, u32Value);
7333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7334
7335 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7337 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7338 return IEMOP_RAISE_INVALID_OPCODE();
7339
7340 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7341 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7342 IEM_MC_ADVANCE_RIP();
7343 IEM_MC_END();
7344 break;
7345
7346 case IEMMODE_64BIT:
7347 IEM_MC_BEGIN(0, 2);
7348 IEM_MC_LOCAL(uint64_t, u64Value);
7349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7350
7351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7353 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7354 return IEMOP_RAISE_INVALID_OPCODE();
7355
7356 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7357 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7358 IEM_MC_ADVANCE_RIP();
7359 IEM_MC_END();
7360 break;
7361
7362 case IEMMODE_16BIT:
7363 /** @todo check this form. */
7364 return IEMOP_RAISE_INVALID_OPCODE();
7365 }
7366 }
7367 else
7368 return IEMOP_RAISE_INVALID_OPCODE();
7369 return VINF_SUCCESS;
7370}
7371/* Opcode 0x66 0x0f 0xc3 - invalid */
7372/* Opcode 0xf3 0x0f 0xc3 - invalid */
7373/* Opcode 0xf2 0x0f 0xc3 - invalid */
7374
7375/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
7376FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7377/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
7378FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
7379/* Opcode 0xf3 0x0f 0xc4 - invalid */
7380/* Opcode 0xf2 0x0f 0xc4 - invalid */
7381
7382/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7383FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7384/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
7385FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
7386/* Opcode 0xf3 0x0f 0xc5 - invalid */
7387/* Opcode 0xf2 0x0f 0xc5 - invalid */
7388
7389/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
7390FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
7391/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
7392FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
7393/* Opcode 0xf3 0x0f 0xc6 - invalid */
7394/* Opcode 0xf2 0x0f 0xc6 - invalid */
7395
7396
7397/** Opcode 0x0f 0xc7 !11/1. */
7398FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7399{
7400 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7401
7402 IEM_MC_BEGIN(4, 3);
7403 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7404 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7405 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7406 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7407 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7408 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7410
7411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7412 IEMOP_HLP_DONE_DECODING();
7413 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7414
7415 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7416 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7417 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7418
7419 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7420 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7421 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7422
7423 IEM_MC_FETCH_EFLAGS(EFlags);
7424 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7425 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7426 else
7427 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7428
7429 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7430 IEM_MC_COMMIT_EFLAGS(EFlags);
7431 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7432 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7433 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7434 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7435 IEM_MC_ENDIF();
7436 IEM_MC_ADVANCE_RIP();
7437
7438 IEM_MC_END();
7439 return VINF_SUCCESS;
7440}
7441
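/* Illustrative sketch (not decoder code; helper name invented): the
   compare-and-exchange the cmpxchg8b workers above are assumed to implement,
   ignoring the locked variant and all EFLAGS details other than ZF. */
#if 0
static void iemCmpXchg8bSketch(uint64_t *pu64Mem, RTUINT64U *pEaxEdx, RTUINT64U const *pEbxEcx, uint32_t *pfEFlags)
{
    if (*pu64Mem == pEaxEdx->u)
    {
        *pu64Mem   = pEbxEcx->u;     /* Equal: store ECX:EBX to memory... */
        *pfEFlags |= X86_EFL_ZF;     /* ... and set ZF. */
    }
    else
    {
        pEaxEdx->u = *pu64Mem;       /* Not equal: load EDX:EAX from memory... */
        *pfEFlags &= ~X86_EFL_ZF;    /* ... and clear ZF. */
    }
}
#endif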
7442
7443/** Opcode REX.W 0x0f 0xc7 !11/1. */
7444FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7445{
7446 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7447 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7448 {
7449#if 0
7450 RT_NOREF(bRm);
7451 IEMOP_BITCH_ABOUT_STUB();
7452 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7453#else
7454 IEM_MC_BEGIN(4, 3);
7455 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7456 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7457 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7458 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7459 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7460 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7462
7463 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7464 IEMOP_HLP_DONE_DECODING();
7465 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7466 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7467
7468 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7469 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7470 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7471
7472 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7473 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7474 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7475
7476 IEM_MC_FETCH_EFLAGS(EFlags);
7477# ifdef RT_ARCH_AMD64
7478 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7479 {
7480 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7481 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7482 else
7483 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7484 }
7485 else
7486# endif
7487 {
7488 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
7489 accesses that are not at all atomic, which works fine in a uni-CPU guest
7490 configuration (ignoring DMA). If guest SMP is active we have no choice
7491 but to use a rendezvous callback here. Sigh. */
7492 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7493 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7494 else
7495 {
7496 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7497 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7498 }
7499 }
7500
7501 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7502 IEM_MC_COMMIT_EFLAGS(EFlags);
7503 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7504 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7505 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7506 IEM_MC_ENDIF();
7507 IEM_MC_ADVANCE_RIP();
7508
7509 IEM_MC_END();
7510 return VINF_SUCCESS;
7511#endif
7512 }
7513 Log(("cmpxchg16b -> #UD\n"));
7514 return IEMOP_RAISE_INVALID_OPCODE();
7515}
7516
7517
7518/** Opcode 0x0f 0xc7 11/6. */
7519FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7520
7521/** Opcode 0x0f 0xc7 !11/6. */
7522FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7523
7524/** Opcode 0x66 0x0f 0xc7 !11/6. */
7525FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7526
7527/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7528FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7529
7530/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7531FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7532
7533
7534/** Opcode 0x0f 0xc7. */
7535FNIEMOP_DEF(iemOp_Grp9)
7536{
7537 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
7538 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7539 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7540 {
7541 case 0: case 2: case 3: case 4: case 5:
7542 return IEMOP_RAISE_INVALID_OPCODE();
7543 case 1:
7544 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
7545 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
7546 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
7547 return IEMOP_RAISE_INVALID_OPCODE();
7548 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7549 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7550 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7551 case 6:
7552 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7553 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
7554 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7555 {
7556 case 0:
7557 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
7558 case IEM_OP_PRF_SIZE_OP:
7559 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
7560 case IEM_OP_PRF_REPZ:
7561 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
7562 default:
7563 return IEMOP_RAISE_INVALID_OPCODE();
7564 }
7565 case 7:
7566 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7567 {
7568 case 0:
7569 case IEM_OP_PRF_REPZ:
7570 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
7571 default:
7572 return IEMOP_RAISE_INVALID_OPCODE();
7573 }
7574 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7575 }
7576}
7577
7578
7579/**
7580 * Common 'bswap register' helper.
7581 */
7582FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7583{
7584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7585 switch (pVCpu->iem.s.enmEffOpSize)
7586 {
7587 case IEMMODE_16BIT:
7588 IEM_MC_BEGIN(1, 0);
7589 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7590 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7591 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7592 IEM_MC_ADVANCE_RIP();
7593 IEM_MC_END();
7594 return VINF_SUCCESS;
7595
7596 case IEMMODE_32BIT:
7597 IEM_MC_BEGIN(1, 0);
7598 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7599 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7600 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7601 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7602 IEM_MC_ADVANCE_RIP();
7603 IEM_MC_END();
7604 return VINF_SUCCESS;
7605
7606 case IEMMODE_64BIT:
7607 IEM_MC_BEGIN(1, 0);
7608 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7609 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7610 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7611 IEM_MC_ADVANCE_RIP();
7612 IEM_MC_END();
7613 return VINF_SUCCESS;
7614
7615 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7616 }
7617}
7618
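/* Illustrative sketch (not decoder code; helper name invented): the 32-bit
   byte swap the iemAImpl_bswap_u32 worker is assumed to perform; for the
   16-bit operand size the architectural result is undefined, which is why
   the 16-bit case above just hands the worker a 32-bit register reference
   without clearing anything. */
#if 0
static uint32_t iemBSwapU32Sketch(uint32_t u32)
{
    return (u32 >> 24)
         | ((u32 >> 8) & UINT32_C(0x0000ff00))
         | ((u32 << 8) & UINT32_C(0x00ff0000))
         | (u32 << 24);
}
#endif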
7619
7620/** Opcode 0x0f 0xc8. */
7621FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7622{
7623 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7624 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7625 prefix, but REX.B appears to be the correct prefix. For a parallel
7626 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7627 IEMOP_HLP_MIN_486();
7628 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7629}
7630
7631
7632/** Opcode 0x0f 0xc9. */
7633FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7634{
7635 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7636 IEMOP_HLP_MIN_486();
7637 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7638}
7639
7640
7641/** Opcode 0x0f 0xca. */
7642FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7643{
7644 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7645 IEMOP_HLP_MIN_486();
7646 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7647}
7648
7649
7650/** Opcode 0x0f 0xcb. */
7651FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7652{
7653 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7654 IEMOP_HLP_MIN_486();
7655 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7656}
7657
7658
7659/** Opcode 0x0f 0xcc. */
7660FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7661{
7662 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7663 IEMOP_HLP_MIN_486();
7664 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7665}
7666
7667
7668/** Opcode 0x0f 0xcd. */
7669FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7670{
7671 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7672 IEMOP_HLP_MIN_486();
7673 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7674}
7675
7676
7677/** Opcode 0x0f 0xce. */
7678FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7679{
7680 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7681 IEMOP_HLP_MIN_486();
7682 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7683}
7684
7685
7686/** Opcode 0x0f 0xcf. */
7687FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7688{
7689 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7690 IEMOP_HLP_MIN_486();
7691 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7692}
7693
7694
7695/* Opcode 0x0f 0xd0 - invalid */
7696/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7697FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7698/* Opcode 0xf3 0x0f 0xd0 - invalid */
7699/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7700FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7701
7702/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7703FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7704/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7705FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7706/* Opcode 0xf3 0x0f 0xd1 - invalid */
7707/* Opcode 0xf2 0x0f 0xd1 - invalid */
7708
7709/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7710FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7711/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7712FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7713/* Opcode 0xf3 0x0f 0xd2 - invalid */
7714/* Opcode 0xf2 0x0f 0xd2 - invalid */
7715
7716/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7717FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7718/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7719FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7720/* Opcode 0xf3 0x0f 0xd3 - invalid */
7721/* Opcode 0xf2 0x0f 0xd3 - invalid */
7722
7723/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7724FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7725/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7726FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7727/* Opcode 0xf3 0x0f 0xd4 - invalid */
7728/* Opcode 0xf2 0x0f 0xd4 - invalid */
7729
7730/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7731FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7732/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7733FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7734/* Opcode 0xf3 0x0f 0xd5 - invalid */
7735/* Opcode 0xf2 0x0f 0xd5 - invalid */
7736
7737/* Opcode 0x0f 0xd6 - invalid */
7738/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7739FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7740/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7741FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7742/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7743FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7744#if 0
7745FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7746{
7747 /* Docs say register only. */
7748 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7749
7750 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7751 {
7752 case IEM_OP_PRF_SIZE_OP: /* SSE */
7753 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7754 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7755 IEM_MC_BEGIN(2, 0);
7756 IEM_MC_ARG(uint64_t *, pDst, 0);
7757 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7758 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7759 IEM_MC_PREPARE_SSE_USAGE();
7760 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7761 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7762 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7763 IEM_MC_ADVANCE_RIP();
7764 IEM_MC_END();
7765 return VINF_SUCCESS;
7766
7767 case 0: /* MMX */
7768 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7769 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7770 IEM_MC_BEGIN(2, 0);
7771 IEM_MC_ARG(uint64_t *, pDst, 0);
7772 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7773 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7774 IEM_MC_PREPARE_FPU_USAGE();
7775 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7776 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7777 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7778 IEM_MC_ADVANCE_RIP();
7779 IEM_MC_END();
7780 return VINF_SUCCESS;
7781
7782 default:
7783 return IEMOP_RAISE_INVALID_OPCODE();
7784 }
7785}
7786#endif
7787
7788
7789/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
7790FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
7791{
7792 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
7793 /** @todo testcase: Check that the instruction implicitly clears the high
7794 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
7795 * and opcode modifications are made to work with the whole width (not
7796 * just 128). */
7797 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7798 /* Docs say register only. */
7799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7800 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7801 {
7802 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7803 IEM_MC_BEGIN(2, 0);
7804 IEM_MC_ARG(uint64_t *, pDst, 0);
7805 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7806 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7807 IEM_MC_PREPARE_FPU_USAGE();
7808 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7809 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7810 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7811 IEM_MC_ADVANCE_RIP();
7812 IEM_MC_END();
7813 return VINF_SUCCESS;
7814 }
7815 return IEMOP_RAISE_INVALID_OPCODE();
7816}
7817
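/* Illustrative sketch (not decoder code; helper name invented): the MMX
   pmovmskb above is assumed to gather the most significant bit of each of
   the eight source bytes into bits 0..7 of the destination GREG. */
#if 0
static uint64_t iemPMovMskBU64Sketch(uint64_t uSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return fMask;
}
#endif
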
7818/** Opcode 0x66 0x0f 0xd7 - vpmovmskb Gd, Ux */
7819FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
7820{
7821 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
7822 /** @todo testcase: Check that the instruction implicitly clears the high
7823 * bits in 64-bit mode. REX.W only becomes necessary once VLMAX > 256
7824 * and opcode modifications are made to work with the whole width (not
7825 * just 128). */
7826 IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd,Ux");
7827 /* Docs say register only. */
7828 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7829 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7830 {
7831 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7832 IEM_MC_BEGIN(2, 0);
7833 IEM_MC_ARG(uint64_t *, pDst, 0);
7834 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7835 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7836 IEM_MC_PREPARE_SSE_USAGE();
7837 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7838 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7839 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7840 IEM_MC_ADVANCE_RIP();
7841 IEM_MC_END();
7842 return VINF_SUCCESS;
7843 }
7844 return IEMOP_RAISE_INVALID_OPCODE();
7845}
7846
7847/* Opcode 0xf3 0x0f 0xd7 - invalid */
7848/* Opcode 0xf2 0x0f 0xd7 - invalid */
7849
7850
7851/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7852FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7853/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7854FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7855/* Opcode 0xf3 0x0f 0xd8 - invalid */
7856/* Opcode 0xf2 0x0f 0xd8 - invalid */
7857
7858/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7859FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7860/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7861FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7862/* Opcode 0xf3 0x0f 0xd9 - invalid */
7863/* Opcode 0xf2 0x0f 0xd9 - invalid */
7864
7865/** Opcode 0x0f 0xda - pminub Pq, Qq */
7866FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7867/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7868FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7869/* Opcode 0xf3 0x0f 0xda - invalid */
7870/* Opcode 0xf2 0x0f 0xda - invalid */
7871
7872/** Opcode 0x0f 0xdb - pand Pq, Qq */
7873FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7874/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7875FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7876/* Opcode 0xf3 0x0f 0xdb - invalid */
7877/* Opcode 0xf2 0x0f 0xdb - invalid */
7878
7879/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7880FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7881/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7882FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7883/* Opcode 0xf3 0x0f 0xdc - invalid */
7884/* Opcode 0xf2 0x0f 0xdc - invalid */
7885
7886/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7887FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7888/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7889FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7890/* Opcode 0xf3 0x0f 0xdd - invalid */
7891/* Opcode 0xf2 0x0f 0xdd - invalid */
7892
7893/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7894FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7895/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7896FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7897/* Opcode 0xf3 0x0f 0xde - invalid */
7898/* Opcode 0xf2 0x0f 0xde - invalid */
7899
7900/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7901FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7902/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7903FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7904/* Opcode 0xf3 0x0f 0xdf - invalid */
7905/* Opcode 0xf2 0x0f 0xdf - invalid */
7906
7907/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7908FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7909/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7910FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7911/* Opcode 0xf3 0x0f 0xe0 - invalid */
7912/* Opcode 0xf2 0x0f 0xe0 - invalid */
7913
7914/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7915FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7916/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7917FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7918/* Opcode 0xf3 0x0f 0xe1 - invalid */
7919/* Opcode 0xf2 0x0f 0xe1 - invalid */
7920
7921/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7922FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7923/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7924FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7925/* Opcode 0xf3 0x0f 0xe2 - invalid */
7926/* Opcode 0xf2 0x0f 0xe2 - invalid */
7927
7928/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7929FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7930/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7931FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7932/* Opcode 0xf3 0x0f 0xe3 - invalid */
7933/* Opcode 0xf2 0x0f 0xe3 - invalid */
7934
7935/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7936FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7937/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7938FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7939/* Opcode 0xf3 0x0f 0xe4 - invalid */
7940/* Opcode 0xf2 0x0f 0xe4 - invalid */
7941
7942/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7943FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7944/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7945FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7946/* Opcode 0xf3 0x0f 0xe5 - invalid */
7947/* Opcode 0xf2 0x0f 0xe5 - invalid */
7948
7949/* Opcode 0x0f 0xe6 - invalid */
7950/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7951FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7952/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7953FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7954/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7955FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7956
7957
7958/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
7959FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
7960{
7961 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7962 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7963 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7964 {
7965 /* Register, memory. */
7966 IEM_MC_BEGIN(0, 2);
7967 IEM_MC_LOCAL(uint64_t, uSrc);
7968 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7969
7970 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7972 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7973 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7974
7975 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7976 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7977
7978 IEM_MC_ADVANCE_RIP();
7979 IEM_MC_END();
7980 return VINF_SUCCESS;
7981 }
7982 /* The register, register encoding is invalid. */
7983 return IEMOP_RAISE_INVALID_OPCODE();
7984}
7985
7986/** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */
7987FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
7988{
7989 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7990 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7991 {
7992 /* Register, memory. */
7993 IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
7994 IEM_MC_BEGIN(0, 2);
7995 IEM_MC_LOCAL(uint128_t, uSrc);
7996 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7997
7998 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8000 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8001 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8002
8003 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8004 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8005
8006 IEM_MC_ADVANCE_RIP();
8007 IEM_MC_END();
8008 return VINF_SUCCESS;
8009 }
8010
8011 /* The register, register encoding is invalid. */
8012 return IEMOP_RAISE_INVALID_OPCODE();
8013}
8014
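/* Illustrative sketch (not decoder code; helper name invented): the
   alignment rule the IEM_MC_STORE_MEM_U128_ALIGN_SSE microcode above is
   assumed to enforce, i.e. a destination that is not 16-byte aligned
   raises #GP(0). */
#if 0
static bool iemMovNtDqAddrOkSketch(RTGCPTR GCPtrEff)
{
    return !(GCPtrEff & 15); /* must be on a 16 byte boundary */
}
#endif
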
8015/* Opcode 0xf3 0x0f 0xe7 - invalid */
8016/* Opcode 0xf2 0x0f 0xe7 - invalid */
8017
8018
8019/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8020FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8021/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
8022FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
8023/* Opcode 0xf3 0x0f 0xe8 - invalid */
8024/* Opcode 0xf2 0x0f 0xe8 - invalid */
8025
8026/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8027FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8028/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
8029FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
8030/* Opcode 0xf3 0x0f 0xe9 - invalid */
8031/* Opcode 0xf2 0x0f 0xe9 - invalid */
8032
8033/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8034FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8035/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
8036FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
8037/* Opcode 0xf3 0x0f 0xea - invalid */
8038/* Opcode 0xf2 0x0f 0xea - invalid */
8039
8040/** Opcode 0x0f 0xeb - por Pq, Qq */
8041FNIEMOP_STUB(iemOp_por_Pq_Qq);
8042/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
8043FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
8044/* Opcode 0xf3 0x0f 0xeb - invalid */
8045/* Opcode 0xf2 0x0f 0xeb - invalid */
8046
8047/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8048FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8049/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
8050FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
8051/* Opcode 0xf3 0x0f 0xec - invalid */
8052/* Opcode 0xf2 0x0f 0xec - invalid */
8053
8054/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8055FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8056/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
8057FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
8058/* Opcode 0xf3 0x0f 0xed - invalid */
8059/* Opcode 0xf2 0x0f 0xed - invalid */
8060
8061/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8062FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8063/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
8064FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
8065/* Opcode 0xf3 0x0f 0xee - invalid */
8066/* Opcode 0xf2 0x0f 0xee - invalid */
8067
8068
8069/** Opcode 0x0f 0xef - pxor Pq, Qq */
8070FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8071{
8072 IEMOP_MNEMONIC(pxor, "pxor");
8073 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8074}
8075
8076/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
8077FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
8078{
8079 IEMOP_MNEMONIC(vpxor, "vpxor");
8080 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8081}
8082
8083/* Opcode 0xf3 0x0f 0xef - invalid */
8084/* Opcode 0xf2 0x0f 0xef - invalid */
8085
8086/* Opcode 0x0f 0xf0 - invalid */
8087/* Opcode 0x66 0x0f 0xf0 - invalid */
8088/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
8089FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
8090
8091/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8092FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8093/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
8094FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
8095/* Opcode 0xf2 0x0f 0xf1 - invalid */
8096
8097/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8098FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8099/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
8100FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
8101/* Opcode 0xf2 0x0f 0xf2 - invalid */
8102
8103/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8104FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8105/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
8106FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
8107/* Opcode 0xf2 0x0f 0xf3 - invalid */
8108
8109/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8110FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8111/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
8112FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
8113/* Opcode 0xf2 0x0f 0xf4 - invalid */
8114
8115/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8116FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8117/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
8118FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
8119/* Opcode 0xf2 0x0f 0xf5 - invalid */
8120
8121/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8122FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8123/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
8124FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
8125/* Opcode 0xf2 0x0f 0xf6 - invalid */
8126
8127/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8128FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8129/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
8130FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
8131/* Opcode 0xf2 0x0f 0xf7 - invalid */
8132
8133/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8134FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8135/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
8136FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
8137/* Opcode 0xf2 0x0f 0xf8 - invalid */
8138
8139/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8140FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8141/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
8142FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
8143/* Opcode 0xf2 0x0f 0xf9 - invalid */
8144
8145/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8146FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8147/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
8148FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
8149/* Opcode 0xf2 0x0f 0xfa - invalid */
8150
8151/** Opcode 0x0f 0xfb - psubq Pq, Qq */
8152FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
8153/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
8154FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
8155/* Opcode 0xf2 0x0f 0xfb - invalid */
8156
8157/** Opcode 0x0f 0xfc - paddb Pq, Qq */
8158FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
8159/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
8160FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
8161/* Opcode 0xf2 0x0f 0xfc - invalid */
8162
8163/** Opcode 0x0f 0xfd - paddw Pq, Qq */
8164FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
8165/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
8166FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
8167/* Opcode 0xf2 0x0f 0xfd - invalid */
8168
8169/** Opcode 0x0f 0xfe - paddd Pq, Qq */
8170FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
8171/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
8172FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
8173/* Opcode 0xf2 0x0f 0xfe - invalid */
8174
8175
8176/** Opcode **** 0x0f 0xff - UD0 */
8177FNIEMOP_DEF(iemOp_ud0)
8178{
8179 IEMOP_MNEMONIC(ud0, "ud0");
8180 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
8181 {
8182 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
8183#ifndef TST_IEM_CHECK_MC
8184 RTGCPTR GCPtrEff;
8185 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
8186 if (rcStrict != VINF_SUCCESS)
8187 return rcStrict;
8188#endif
8189 IEMOP_HLP_DONE_DECODING();
8190 }
8191 return IEMOP_RAISE_INVALID_OPCODE();
8192}
8193
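/* Illustrative sketch (not decoder code; helper name invented): why the
   vendor check above matters for the instruction length, assuming Intel
   decodes a ModR/M byte (plus any SIB and displacement) for ud0 while AMD
   raises #UD right after the opcode bytes. */
#if 0
static unsigned iemUd0LengthSketch(bool fIntelGuest, unsigned cbModRmSibDisp)
{
    return 2 /* 0x0f 0xff */ + (fIntelGuest ? cbModRmSibDisp : 0);
}
#endif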
8194
8195
8196/** Repeats a_fn four times. For decoding tables. */
8197#define IEMOP_X4(a_fn) a_fn, a_fn, a_fn, a_fn
8198
8199IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
8200{
8201 /* no prefix, 066h prefix f3h prefix, f2h prefix */
8202 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
8203 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
8204 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
8205 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
8206 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
8207 /* 0x05 */ IEMOP_X4(iemOp_syscall),
8208 /* 0x06 */ IEMOP_X4(iemOp_clts),
8209 /* 0x07 */ IEMOP_X4(iemOp_sysret),
8210 /* 0x08 */ IEMOP_X4(iemOp_invd),
8211 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
8212 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
8213 /* 0x0b */ IEMOP_X4(iemOp_ud2),
8214 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
8215 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
8216 /* 0x0e */ IEMOP_X4(iemOp_femms),
8217 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
8218
8219 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
8220 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
8221 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
8222 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8223 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8224 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8225 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
8226 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8227 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
8228 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
8229 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
8230 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
8231 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
8232 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
8233 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
8234 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
8235
8236 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
8237 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
8238 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
8239 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8240 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8241 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8242 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8243 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8244 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8245 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM,
8246 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
8247 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8248 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
8249 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
8250 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8251 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8252
8253 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
8254 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
8255 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
8256 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
8257 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
8258 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
8259 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
8260 /* 0x37 */ IEMOP_X4(iemOp_getsec),
8261 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
8262 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8263 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
8264 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8265 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8266 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8267 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8268 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8269
8270 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
8271 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
8272 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
8273 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
8274 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
8275 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
8276 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
8277 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
8278 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
8279 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
8280 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
8281 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
8282 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
8283 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
8284 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
8285 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
8286
8287 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8288 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8289 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8290 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8291 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8292 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8293 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8294 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8295 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8296 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8297 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8298 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8299 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8300 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8301 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8302 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8303
8304 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8305 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8306 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8307 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8308 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8309 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8310 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8311 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8312 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8313 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8314 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8315 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8316 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8317 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8318 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8319 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
8320
8321 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
8322 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8323 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8324 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8325 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8326 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8327 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8328 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8329
8330 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8331 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8332 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8333 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8334 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8335 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8336 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
8337 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
8338
8339 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
8340 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
8341 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
8342 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
8343 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
8344 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
8345 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
8346 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
8347 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
8348 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
8349 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
8350 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
8351 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
8352 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
8353 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
8354 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
8355
8356 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
8357 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
8358 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
8359 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
8360 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
8361 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
8362 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
8363 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
8364 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
8365 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
8366 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
8367 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
8368 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
8369 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
8370 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
8371 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
8372
8373 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
8374 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
8375 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
8376 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
8377 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
8378 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
8379 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8380 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8381 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
8382 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
8383 /* 0xaa */ IEMOP_X4(iemOp_rsm),
8384 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
8385 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
8386 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
8387 /* 0xae */ IEMOP_X4(iemOp_Grp15),
8388 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
8389
8390 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
8391 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
8392 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
8393 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
8394 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
8395 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
8396 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
8397 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
8398 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
8399 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
8400 /* 0xba */ IEMOP_X4(iemOp_Grp8),
8401 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
8402 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
8403 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
8404 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
8405 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
8406
8407 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
8408 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
8409 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8410 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8411 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8412 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8413 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8414 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
8415 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
8416 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
8417 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
8418 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
8419 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
8420 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
8421 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
8422 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
8423
8424 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8425 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8426 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8427 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8428 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8429 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8430 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
8431 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8432 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8433 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8434 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8435 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8436 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8437 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8438 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8439 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8440
8441 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8442 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8443 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8444 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8445 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8446 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8447 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8448 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8449 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8450 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8451 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8452 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8453 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8454 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8455 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8456 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8457
8458 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8459 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8460 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8461 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8462 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8463 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8464 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8465 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8466 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8467 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8468 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8469 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8470 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8471 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8472 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8473 /* 0xff */ IEMOP_X4(iemOp_ud0),
8474};
8475AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
8476/** @} */
8477
8478
8479/** @name One byte opcodes.
8480 *
8481 * @{
8482 */
8483
8484/** Opcode 0x00. */
8485FNIEMOP_DEF(iemOp_add_Eb_Gb)
8486{
8487 IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
8488 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
8489}
8490
8491
8492/** Opcode 0x01. */
8493FNIEMOP_DEF(iemOp_add_Ev_Gv)
8494{
8495 IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
8496 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
8497}
8498
8499
8500/** Opcode 0x02. */
8501FNIEMOP_DEF(iemOp_add_Gb_Eb)
8502{
8503 IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
8504 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
8505}
8506
8507
8508/** Opcode 0x03. */
8509FNIEMOP_DEF(iemOp_add_Gv_Ev)
8510{
8511 IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
8512 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
8513}
8514
8515
8516/** Opcode 0x04. */
8517FNIEMOP_DEF(iemOp_add_Al_Ib)
8518{
8519 IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
8520 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
8521}
8522
8523
8524/** Opcode 0x05. */
8525FNIEMOP_DEF(iemOp_add_eAX_Iz)
8526{
8527 IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
8528 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
8529}
8530
8531
8532/** Opcode 0x06. */
8533FNIEMOP_DEF(iemOp_push_ES)
8534{
8535 IEMOP_MNEMONIC(push_es, "push es");
8536 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
8537}
8538
8539
8540/** Opcode 0x07. */
8541FNIEMOP_DEF(iemOp_pop_ES)
8542{
8543 IEMOP_MNEMONIC(pop_es, "pop es");
8544 IEMOP_HLP_NO_64BIT();
8545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8546 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
8547}
8548
8549
8550/** Opcode 0x08. */
8551FNIEMOP_DEF(iemOp_or_Eb_Gb)
8552{
8553 IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
8554 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8555 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
8556}
8557
8558
8559/** Opcode 0x09. */
8560FNIEMOP_DEF(iemOp_or_Ev_Gv)
8561{
8562 IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
8563 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8564 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
8565}
8566
8567
8568/** Opcode 0x0a. */
8569FNIEMOP_DEF(iemOp_or_Gb_Eb)
8570{
8571 IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
8572 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8573 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
8574}
8575
8576
8577/** Opcode 0x0b. */
8578FNIEMOP_DEF(iemOp_or_Gv_Ev)
8579{
8580 IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
8581 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8582 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
8583}
8584
8585
8586/** Opcode 0x0c. */
8587FNIEMOP_DEF(iemOp_or_Al_Ib)
8588{
8589 IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
8590 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8591 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
8592}
8593
8594
8595/** Opcode 0x0d. */
8596FNIEMOP_DEF(iemOp_or_eAX_Iz)
8597{
8598 IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
8599 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8600 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
8601}
8602
8603
8604/** Opcode 0x0e. */
8605FNIEMOP_DEF(iemOp_push_CS)
8606{
8607 IEMOP_MNEMONIC(push_cs, "push cs");
8608 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
8609}
8610
8611
8612/** Opcode 0x0f. */
8613FNIEMOP_DEF(iemOp_2byteEscape)
8614{
8615#ifdef VBOX_STRICT
8616 static bool s_fTested = false;
8617 if (RT_LIKELY(s_fTested)) { /* likely */ }
8618 else
8619 {
8620 s_fTested = true;
8621 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
8622 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
8623 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
8624 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
8625 }
8626#endif
8627
8628 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8629
8630 /** @todo POP CS on 8086, undefined on 80186. */
8631 IEMOP_HLP_MIN_286();
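 /*
  * The map carries four entries per opcode byte, selected by idxPrefix:
  * 0 = no prefix, 1 = 0x66, 2 = 0xf3, 3 = 0xf2 (cf. the VBOX_STRICT
  * self-check above).  An 'f3 0f bc /r' thus dispatches via entry
  * 0xbc*4 + 2, i.e. iemOp_tzcnt_Gv_Ev instead of the plain bsf worker.
  */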
8632 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
8633}
8634
8635/** Opcode 0x10. */
8636FNIEMOP_DEF(iemOp_adc_Eb_Gb)
8637{
8638 IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
8639 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
8640}
8641
8642
8643/** Opcode 0x11. */
8644FNIEMOP_DEF(iemOp_adc_Ev_Gv)
8645{
8646 IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
8647 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
8648}
8649
8650
8651/** Opcode 0x12. */
8652FNIEMOP_DEF(iemOp_adc_Gb_Eb)
8653{
8654 IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
8655 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
8656}
8657
8658
8659/** Opcode 0x13. */
8660FNIEMOP_DEF(iemOp_adc_Gv_Ev)
8661{
8662 IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
8663 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
8664}
8665
8666
8667/** Opcode 0x14. */
8668FNIEMOP_DEF(iemOp_adc_Al_Ib)
8669{
8670 IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
8671 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
8672}
8673
8674
8675/** Opcode 0x15. */
8676FNIEMOP_DEF(iemOp_adc_eAX_Iz)
8677{
8678 IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
8679 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
8680}
8681
8682
8683/** Opcode 0x16. */
8684FNIEMOP_DEF(iemOp_push_SS)
8685{
8686 IEMOP_MNEMONIC(push_ss, "push ss");
8687 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
8688}
8689
8690
8691/** Opcode 0x17. */
8692FNIEMOP_DEF(iemOp_pop_SS)
8693{
8694 IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
8695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8696 IEMOP_HLP_NO_64BIT();
8697 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
8698}
8699
8700
8701/** Opcode 0x18. */
8702FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
8703{
8704 IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
8705 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
8706}
8707
8708
8709/** Opcode 0x19. */
8710FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
8711{
8712 IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
8713 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
8714}
8715
8716
8717/** Opcode 0x1a. */
8718FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
8719{
8720 IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
8721 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
8722}
8723
8724
8725/** Opcode 0x1b. */
8726FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
8727{
8728 IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
8729 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
8730}
8731
8732
8733/** Opcode 0x1c. */
8734FNIEMOP_DEF(iemOp_sbb_Al_Ib)
8735{
8736 IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
8737 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
8738}
8739
8740
8741/** Opcode 0x1d. */
8742FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
8743{
8744 IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
8745 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
8746}
8747
8748
8749/** Opcode 0x1e. */
8750FNIEMOP_DEF(iemOp_push_DS)
8751{
8752 IEMOP_MNEMONIC(push_ds, "push ds");
8753 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
8754}
8755
8756
8757/** Opcode 0x1f. */
8758FNIEMOP_DEF(iemOp_pop_DS)
8759{
8760 IEMOP_MNEMONIC(pop_ds, "pop ds");
8761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8762 IEMOP_HLP_NO_64BIT();
8763 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
8764}
8765
8766
8767/** Opcode 0x20. */
8768FNIEMOP_DEF(iemOp_and_Eb_Gb)
8769{
8770 IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
8771 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8772 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
8773}
8774
8775
8776/** Opcode 0x21. */
8777FNIEMOP_DEF(iemOp_and_Ev_Gv)
8778{
8779 IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
8780 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8781 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
8782}
8783
8784
8785/** Opcode 0x22. */
8786FNIEMOP_DEF(iemOp_and_Gb_Eb)
8787{
8788 IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
8789 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8790 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
8791}
8792
8793
8794/** Opcode 0x23. */
8795FNIEMOP_DEF(iemOp_and_Gv_Ev)
8796{
8797 IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
8798 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8799 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
8800}
8801
8802
8803/** Opcode 0x24. */
8804FNIEMOP_DEF(iemOp_and_Al_Ib)
8805{
8806 IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
8807 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8808 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
8809}
8810
8811
8812/** Opcode 0x25. */
8813FNIEMOP_DEF(iemOp_and_eAX_Iz)
8814{
8815 IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
8816 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8817 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
8818}
8819
8820
8821/** Opcode 0x26. */
8822FNIEMOP_DEF(iemOp_seg_ES)
8823{
8824 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
8825 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
8826 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
8827
8828 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8829 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8830}
8831
8832
8833/** Opcode 0x27. */
8834FNIEMOP_DEF(iemOp_daa)
8835{
8836 IEMOP_MNEMONIC(daa_AL, "daa AL");
8837 IEMOP_HLP_NO_64BIT();
8838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8839 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8840 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
8841}
8842
8843
8844/** Opcode 0x28. */
8845FNIEMOP_DEF(iemOp_sub_Eb_Gb)
8846{
8847 IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
8848 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
8849}
8850
8851
8852/** Opcode 0x29. */
8853FNIEMOP_DEF(iemOp_sub_Ev_Gv)
8854{
8855 IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
8856 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
8857}
8858
8859
8860/** Opcode 0x2a. */
8861FNIEMOP_DEF(iemOp_sub_Gb_Eb)
8862{
8863 IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
8864 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
8865}
8866
8867
8868/** Opcode 0x2b. */
8869FNIEMOP_DEF(iemOp_sub_Gv_Ev)
8870{
8871 IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
8872 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
8873}
8874
8875
8876/** Opcode 0x2c. */
8877FNIEMOP_DEF(iemOp_sub_Al_Ib)
8878{
8879 IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
8880 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
8881}
8882
8883
8884/** Opcode 0x2d. */
8885FNIEMOP_DEF(iemOp_sub_eAX_Iz)
8886{
8887 IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
8888 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
8889}
8890
8891
8892/** Opcode 0x2e. */
8893FNIEMOP_DEF(iemOp_seg_CS)
8894{
8895 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
8896 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
8897 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
8898
8899 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8900 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8901}
8902
8903
8904/** Opcode 0x2f. */
8905FNIEMOP_DEF(iemOp_das)
8906{
8907 IEMOP_MNEMONIC(das_AL, "das AL");
8908 IEMOP_HLP_NO_64BIT();
8909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8910 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8911 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
8912}
8913
8914
8915/** Opcode 0x30. */
8916FNIEMOP_DEF(iemOp_xor_Eb_Gb)
8917{
8918 IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
8919 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8920 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
8921}
8922
8923
8924/** Opcode 0x31. */
8925FNIEMOP_DEF(iemOp_xor_Ev_Gv)
8926{
8927 IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
8928 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8929 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
8930}
8931
8932
8933/** Opcode 0x32. */
8934FNIEMOP_DEF(iemOp_xor_Gb_Eb)
8935{
8936 IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
8937 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8938 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
8939}
8940
8941
8942/** Opcode 0x33. */
8943FNIEMOP_DEF(iemOp_xor_Gv_Ev)
8944{
8945 IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
8946 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8947 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
8948}
8949
8950
8951/** Opcode 0x34. */
8952FNIEMOP_DEF(iemOp_xor_Al_Ib)
8953{
8954 IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
8955 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8956 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
8957}
8958
8959
8960/** Opcode 0x35. */
8961FNIEMOP_DEF(iemOp_xor_eAX_Iz)
8962{
8963 IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
8964 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8965 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
8966}
8967
8968
8969/** Opcode 0x36. */
8970FNIEMOP_DEF(iemOp_seg_SS)
8971{
8972 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
8973 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
8974 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
8975
8976 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8977 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8978}
8979
8980
8981/** Opcode 0x37. */
8982FNIEMOP_STUB(iemOp_aaa);
8983
8984
8985/** Opcode 0x38. */
8986FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
8987{
8988 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
8989 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
8990}
8991
8992
8993/** Opcode 0x39. */
8994FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
8995{
8996 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
8997 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
8998}
8999
9000
9001/** Opcode 0x3a. */
9002FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
9003{
9004 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
9005 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
9006}
9007
9008
9009/** Opcode 0x3b. */
9010FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
9011{
9012 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
9013 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
9014}
9015
9016
9017/** Opcode 0x3c. */
9018FNIEMOP_DEF(iemOp_cmp_Al_Ib)
9019{
9020 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
9021 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
9022}
9023
9024
9025/** Opcode 0x3d. */
9026FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
9027{
9028 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
9029 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
9030}
9031
9032
9033/** Opcode 0x3e. */
9034FNIEMOP_DEF(iemOp_seg_DS)
9035{
9036 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
9037 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
9038 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
9039
9040 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9041 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9042}
9043
9044
9045/** Opcode 0x3f. */
9046FNIEMOP_STUB(iemOp_aas);
9047
9048/**
9049 * Common 'inc/dec/not/neg register' helper.
9050 */
9051FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
9052{
9053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9054 switch (pVCpu->iem.s.enmEffOpSize)
9055 {
9056 case IEMMODE_16BIT:
9057 IEM_MC_BEGIN(2, 0);
9058 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9059 IEM_MC_ARG(uint32_t *, pEFlags, 1);
9060 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
9061 IEM_MC_REF_EFLAGS(pEFlags);
9062 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
9063 IEM_MC_ADVANCE_RIP();
9064 IEM_MC_END();
9065 return VINF_SUCCESS;
9066
9067 case IEMMODE_32BIT:
9068 IEM_MC_BEGIN(2, 0);
9069 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9070 IEM_MC_ARG(uint32_t *, pEFlags, 1);
9071 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9072 IEM_MC_REF_EFLAGS(pEFlags);
9073 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
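 /* Writes to a 32-bit GPR zero bits 63:32 in 64-bit mode, unlike 8- and
  16-bit writes; hence the explicit clear for this case only. */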
9074 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9075 IEM_MC_ADVANCE_RIP();
9076 IEM_MC_END();
9077 return VINF_SUCCESS;
9078
9079 case IEMMODE_64BIT:
9080 IEM_MC_BEGIN(2, 0);
9081 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9082 IEM_MC_ARG(uint32_t *, pEFlags, 1);
9083 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9084 IEM_MC_REF_EFLAGS(pEFlags);
9085 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
9086 IEM_MC_ADVANCE_RIP();
9087 IEM_MC_END();
9088 return VINF_SUCCESS;
9089 }
9090 return VINF_SUCCESS;
9091}
9092
9093
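 /*
  * 0x40-0x4f: in 64-bit mode these sixteen bytes are REX prefixes rather
  * than inc/dec.  The low nibble carries the B, X, R and W bits (bits 0..3),
  * and the decoder keeps uRexB/uRexIndex/uRexReg pre-shifted to bit 3 so
  * they can be OR'ed straight into the 3-bit ModR/M register fields.
  */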
9094/** Opcode 0x40. */
9095FNIEMOP_DEF(iemOp_inc_eAX)
9096{
9097 /*
9098 * This is a REX prefix in 64-bit mode.
9099 */
9100 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9101 {
9102 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
9103 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
9104
9105 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9106 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9107 }
9108
9109 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
9110 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
9111}
9112
9113
9114/** Opcode 0x41. */
9115FNIEMOP_DEF(iemOp_inc_eCX)
9116{
9117 /*
9118 * This is a REX prefix in 64-bit mode.
9119 */
9120 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9121 {
9122 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
9123 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
9124 pVCpu->iem.s.uRexB = 1 << 3;
9125
9126 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9127 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9128 }
9129
9130 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
9131 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
9132}
9133
9134
9135/** Opcode 0x42. */
9136FNIEMOP_DEF(iemOp_inc_eDX)
9137{
9138 /*
9139 * This is a REX prefix in 64-bit mode.
9140 */
9141 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9142 {
9143 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
9144 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
9145 pVCpu->iem.s.uRexIndex = 1 << 3;
9146
9147 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9148 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9149 }
9150
9151 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
9152 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
9153}
9154
9155
9156
9157/** Opcode 0x43. */
9158FNIEMOP_DEF(iemOp_inc_eBX)
9159{
9160 /*
9161 * This is a REX prefix in 64-bit mode.
9162 */
9163 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9164 {
9165 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
9166 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
9167 pVCpu->iem.s.uRexB = 1 << 3;
9168 pVCpu->iem.s.uRexIndex = 1 << 3;
9169
9170 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9171 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9172 }
9173
9174 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
9175 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
9176}
9177
9178
9179/** Opcode 0x44. */
9180FNIEMOP_DEF(iemOp_inc_eSP)
9181{
9182 /*
9183 * This is a REX prefix in 64-bit mode.
9184 */
9185 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9186 {
9187 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
9188 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
9189 pVCpu->iem.s.uRexReg = 1 << 3;
9190
9191 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9192 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9193 }
9194
9195 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
9196 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
9197}
9198
9199
9200/** Opcode 0x45. */
9201FNIEMOP_DEF(iemOp_inc_eBP)
9202{
9203 /*
9204 * This is a REX prefix in 64-bit mode.
9205 */
9206 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9207 {
9208 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
9209 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
9210 pVCpu->iem.s.uRexReg = 1 << 3;
9211 pVCpu->iem.s.uRexB = 1 << 3;
9212
9213 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9214 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9215 }
9216
9217 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
9218 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
9219}
9220
9221
9222/** Opcode 0x46. */
9223FNIEMOP_DEF(iemOp_inc_eSI)
9224{
9225 /*
9226 * This is a REX prefix in 64-bit mode.
9227 */
9228 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9229 {
9230 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
9231 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
9232 pVCpu->iem.s.uRexReg = 1 << 3;
9233 pVCpu->iem.s.uRexIndex = 1 << 3;
9234
9235 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9236 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9237 }
9238
9239 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
9240 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
9241}
9242
9243
9244/** Opcode 0x47. */
9245FNIEMOP_DEF(iemOp_inc_eDI)
9246{
9247 /*
9248 * This is a REX prefix in 64-bit mode.
9249 */
9250 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9251 {
9252 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
9253 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
9254 pVCpu->iem.s.uRexReg = 1 << 3;
9255 pVCpu->iem.s.uRexB = 1 << 3;
9256 pVCpu->iem.s.uRexIndex = 1 << 3;
9257
9258 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9259 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9260 }
9261
9262 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
9263 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
9264}
9265
9266
9267/** Opcode 0x48. */
9268FNIEMOP_DEF(iemOp_dec_eAX)
9269{
9270 /*
9271 * This is a REX prefix in 64-bit mode.
9272 */
9273 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9274 {
9275 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
9276 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
9277 iemRecalEffOpSize(pVCpu);
9278
9279 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9280 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9281 }
9282
9283 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
9284 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
9285}
9286
9287
9288/** Opcode 0x49. */
9289FNIEMOP_DEF(iemOp_dec_eCX)
9290{
9291 /*
9292 * This is a REX prefix in 64-bit mode.
9293 */
9294 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9295 {
9296 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
9297 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
9298 pVCpu->iem.s.uRexB = 1 << 3;
9299 iemRecalEffOpSize(pVCpu);
9300
9301 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9302 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9303 }
9304
9305 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
9306 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
9307}
9308
9309
9310/** Opcode 0x4a. */
9311FNIEMOP_DEF(iemOp_dec_eDX)
9312{
9313 /*
9314 * This is a REX prefix in 64-bit mode.
9315 */
9316 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9317 {
9318 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
9319 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9320 pVCpu->iem.s.uRexIndex = 1 << 3;
9321 iemRecalEffOpSize(pVCpu);
9322
9323 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9324 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9325 }
9326
9327 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
9328 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
9329}
9330
9331
9332/** Opcode 0x4b. */
9333FNIEMOP_DEF(iemOp_dec_eBX)
9334{
9335 /*
9336 * This is a REX prefix in 64-bit mode.
9337 */
9338 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9339 {
9340 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
9341 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9342 pVCpu->iem.s.uRexB = 1 << 3;
9343 pVCpu->iem.s.uRexIndex = 1 << 3;
9344 iemRecalEffOpSize(pVCpu);
9345
9346 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9347 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9348 }
9349
9350 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
9351 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
9352}
9353
9354
9355/** Opcode 0x4c. */
9356FNIEMOP_DEF(iemOp_dec_eSP)
9357{
9358 /*
9359 * This is a REX prefix in 64-bit mode.
9360 */
9361 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9362 {
9363 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
9364 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
9365 pVCpu->iem.s.uRexReg = 1 << 3;
9366 iemRecalEffOpSize(pVCpu);
9367
9368 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9369 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9370 }
9371
9372 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
9373 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
9374}
9375
9376
9377/** Opcode 0x4d. */
9378FNIEMOP_DEF(iemOp_dec_eBP)
9379{
9380 /*
9381 * This is a REX prefix in 64-bit mode.
9382 */
9383 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9384 {
9385 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
9386 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
9387 pVCpu->iem.s.uRexReg = 1 << 3;
9388 pVCpu->iem.s.uRexB = 1 << 3;
9389 iemRecalEffOpSize(pVCpu);
9390
9391 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9392 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9393 }
9394
9395 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
9396 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
9397}
9398
9399
9400/** Opcode 0x4e. */
9401FNIEMOP_DEF(iemOp_dec_eSI)
9402{
9403 /*
9404 * This is a REX prefix in 64-bit mode.
9405 */
9406 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9407 {
9408 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
9409 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9410 pVCpu->iem.s.uRexReg = 1 << 3;
9411 pVCpu->iem.s.uRexIndex = 1 << 3;
9412 iemRecalEffOpSize(pVCpu);
9413
9414 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9415 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9416 }
9417
9418 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
9419 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
9420}
9421
9422
9423/** Opcode 0x4f. */
9424FNIEMOP_DEF(iemOp_dec_eDI)
9425{
9426 /*
9427 * This is a REX prefix in 64-bit mode.
9428 */
9429 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9430 {
9431 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
9432 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9433 pVCpu->iem.s.uRexReg = 1 << 3;
9434 pVCpu->iem.s.uRexB = 1 << 3;
9435 pVCpu->iem.s.uRexIndex = 1 << 3;
9436 iemRecalEffOpSize(pVCpu);
9437
9438 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9439 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9440 }
9441
9442 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
9443 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
9444}
9445
9446
9447/**
9448 * Common 'push register' helper.
9449 */
9450FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
9451{
9452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
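 /* In 64-bit mode PUSH defaults to a 64-bit operand; the only override is
  the 0x66 prefix selecting 16-bit, there being no way to encode a 32-bit
  push in long mode. */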
9453 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9454 {
9455 iReg |= pVCpu->iem.s.uRexB;
9456 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9457 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9458 }
9459
9460 switch (pVCpu->iem.s.enmEffOpSize)
9461 {
9462 case IEMMODE_16BIT:
9463 IEM_MC_BEGIN(0, 1);
9464 IEM_MC_LOCAL(uint16_t, u16Value);
9465 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
9466 IEM_MC_PUSH_U16(u16Value);
9467 IEM_MC_ADVANCE_RIP();
9468 IEM_MC_END();
9469 break;
9470
9471 case IEMMODE_32BIT:
9472 IEM_MC_BEGIN(0, 1);
9473 IEM_MC_LOCAL(uint32_t, u32Value);
9474 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
9475 IEM_MC_PUSH_U32(u32Value);
9476 IEM_MC_ADVANCE_RIP();
9477 IEM_MC_END();
9478 break;
9479
9480 case IEMMODE_64BIT:
9481 IEM_MC_BEGIN(0, 1);
9482 IEM_MC_LOCAL(uint64_t, u64Value);
9483 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
9484 IEM_MC_PUSH_U64(u64Value);
9485 IEM_MC_ADVANCE_RIP();
9486 IEM_MC_END();
9487 break;
9488 }
9489
9490 return VINF_SUCCESS;
9491}
9492
9493
9494/** Opcode 0x50. */
9495FNIEMOP_DEF(iemOp_push_eAX)
9496{
9497 IEMOP_MNEMONIC(push_rAX, "push rAX");
9498 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
9499}
9500
9501
9502/** Opcode 0x51. */
9503FNIEMOP_DEF(iemOp_push_eCX)
9504{
9505 IEMOP_MNEMONIC(push_rCX, "push rCX");
9506 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
9507}
9508
9509
9510/** Opcode 0x52. */
9511FNIEMOP_DEF(iemOp_push_eDX)
9512{
9513 IEMOP_MNEMONIC(push_rDX, "push rDX");
9514 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
9515}
9516
9517
9518/** Opcode 0x53. */
9519FNIEMOP_DEF(iemOp_push_eBX)
9520{
9521 IEMOP_MNEMONIC(push_rBX, "push rBX");
9522 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
9523}
9524
9525
9526/** Opcode 0x54. */
9527FNIEMOP_DEF(iemOp_push_eSP)
9528{
9529 IEMOP_MNEMONIC(push_rSP, "push rSP");
9530 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
9531 {
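 /* The 8086/8088 push the updated value of SP (i.e. SP minus 2), whereas
  the 80286 and later push the original value; hence the explicit
  subtract before the push below. */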
9532 IEM_MC_BEGIN(0, 1);
9533 IEM_MC_LOCAL(uint16_t, u16Value);
9534 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
9535 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
9536 IEM_MC_PUSH_U16(u16Value);
9537 IEM_MC_ADVANCE_RIP();
9538 IEM_MC_END();
9539 }
9540 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
9541}
9542
9543
9544/** Opcode 0x55. */
9545FNIEMOP_DEF(iemOp_push_eBP)
9546{
9547 IEMOP_MNEMONIC(push_rBP, "push rBP");
9548 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
9549}
9550
9551
9552/** Opcode 0x56. */
9553FNIEMOP_DEF(iemOp_push_eSI)
9554{
9555 IEMOP_MNEMONIC(push_rSI, "push rSI");
9556 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
9557}
9558
9559
9560/** Opcode 0x57. */
9561FNIEMOP_DEF(iemOp_push_eDI)
9562{
9563 IEMOP_MNEMONIC(push_rDI, "push rDI");
9564 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
9565}
9566
9567
9568/**
9569 * Common 'pop register' helper.
9570 */
9571FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
9572{
9573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9574 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9575 {
9576 iReg |= pVCpu->iem.s.uRexB;
9577 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9578 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9579 }
9580
9581 switch (pVCpu->iem.s.enmEffOpSize)
9582 {
9583 case IEMMODE_16BIT:
9584 IEM_MC_BEGIN(0, 1);
9585 IEM_MC_LOCAL(uint16_t *, pu16Dst);
9586 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
9587 IEM_MC_POP_U16(pu16Dst);
9588 IEM_MC_ADVANCE_RIP();
9589 IEM_MC_END();
9590 break;
9591
9592 case IEMMODE_32BIT:
9593 IEM_MC_BEGIN(0, 1);
9594 IEM_MC_LOCAL(uint32_t *, pu32Dst);
9595 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9596 IEM_MC_POP_U32(pu32Dst);
9597 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase */
9598 IEM_MC_ADVANCE_RIP();
9599 IEM_MC_END();
9600 break;
9601
9602 case IEMMODE_64BIT:
9603 IEM_MC_BEGIN(0, 1);
9604 IEM_MC_LOCAL(uint64_t *, pu64Dst);
9605 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9606 IEM_MC_POP_U64(pu64Dst);
9607 IEM_MC_ADVANCE_RIP();
9608 IEM_MC_END();
9609 break;
9610 }
9611
9612 return VINF_SUCCESS;
9613}
9614
9615
9616/** Opcode 0x58. */
9617FNIEMOP_DEF(iemOp_pop_eAX)
9618{
9619 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
9620 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
9621}
9622
9623
9624/** Opcode 0x59. */
9625FNIEMOP_DEF(iemOp_pop_eCX)
9626{
9627 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
9628 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
9629}
9630
9631
9632/** Opcode 0x5a. */
9633FNIEMOP_DEF(iemOp_pop_eDX)
9634{
9635 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
9636 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
9637}
9638
9639
9640/** Opcode 0x5b. */
9641FNIEMOP_DEF(iemOp_pop_eBX)
9642{
9643 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
9644 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
9645}
9646
9647
9648/** Opcode 0x5c. */
9649FNIEMOP_DEF(iemOp_pop_eSP)
9650{
9651 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
9652 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9653 {
9654 if (pVCpu->iem.s.uRexB)
9655 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
9656 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9657 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9658 }
9659
9660 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
9661 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
9662 /** @todo add testcase for this instruction. */
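 /* POP rSP is special: the value read from the old top of stack ends up in
  rSP, so the pointer increment performed by the pop is effectively
  discarded by the store below - presumably why this is coded via a local
  instead of IEM_MC_REF_GREG. */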
9663 switch (pVCpu->iem.s.enmEffOpSize)
9664 {
9665 case IEMMODE_16BIT:
9666 IEM_MC_BEGIN(0, 1);
9667 IEM_MC_LOCAL(uint16_t, u16Dst);
9668 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
9669 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
9670 IEM_MC_ADVANCE_RIP();
9671 IEM_MC_END();
9672 break;
9673
9674 case IEMMODE_32BIT:
9675 IEM_MC_BEGIN(0, 1);
9676 IEM_MC_LOCAL(uint32_t, u32Dst);
9677 IEM_MC_POP_U32(&u32Dst);
9678 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
9679 IEM_MC_ADVANCE_RIP();
9680 IEM_MC_END();
9681 break;
9682
9683 case IEMMODE_64BIT:
9684 IEM_MC_BEGIN(0, 1);
9685 IEM_MC_LOCAL(uint64_t, u64Dst);
9686 IEM_MC_POP_U64(&u64Dst);
9687 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
9688 IEM_MC_ADVANCE_RIP();
9689 IEM_MC_END();
9690 break;
9691 }
9692
9693 return VINF_SUCCESS;
9694}
9695
9696
9697/** Opcode 0x5d. */
9698FNIEMOP_DEF(iemOp_pop_eBP)
9699{
9700 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
9701 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
9702}
9703
9704
9705/** Opcode 0x5e. */
9706FNIEMOP_DEF(iemOp_pop_eSI)
9707{
9708 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
9709 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
9710}
9711
9712
9713/** Opcode 0x5f. */
9714FNIEMOP_DEF(iemOp_pop_eDI)
9715{
9716 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
9717 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
9718}
9719
9720
9721/** Opcode 0x60. */
9722FNIEMOP_DEF(iemOp_pusha)
9723{
9724 IEMOP_MNEMONIC(pusha, "pusha");
9725 IEMOP_HLP_MIN_186();
9726 IEMOP_HLP_NO_64BIT();
9727 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
9728 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
9729 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
9730 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
9731}
9732
9733
9734/** Opcode 0x61. */
9735FNIEMOP_DEF(iemOp_popa)
9736{
9737 IEMOP_MNEMONIC(popa, "popa");
9738 IEMOP_HLP_MIN_186();
9739 IEMOP_HLP_NO_64BIT();
9740 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
9741 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
9742 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
9743 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
9744}
9745
9746
9747/** Opcode 0x62. */
9748FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
9749// IEMOP_HLP_MIN_186();
9750
9751
9752/** Opcode 0x63 - non-64-bit modes. */
9753FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
9754{
9755 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
9756 IEMOP_HLP_MIN_286();
9757 IEMOP_HLP_NO_REAL_OR_V86_MODE();
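 /* arpl: if the destination selector's RPL (bits 1:0) is below the
  source's, it is raised to match and ZF is set; otherwise ZF is cleared
  and the destination is left untouched. */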
9758 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9759
9760 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9761 {
9762 /* Register */
9763 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
9764 IEM_MC_BEGIN(3, 0);
9765 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9766 IEM_MC_ARG(uint16_t, u16Src, 1);
9767 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9768
9769 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9770 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
9771 IEM_MC_REF_EFLAGS(pEFlags);
9772 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
9773
9774 IEM_MC_ADVANCE_RIP();
9775 IEM_MC_END();
9776 }
9777 else
9778 {
9779 /* Memory */
9780 IEM_MC_BEGIN(3, 2);
9781 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9782 IEM_MC_ARG(uint16_t, u16Src, 1);
9783 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9785
9786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9787 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
9788 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9789 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9790 IEM_MC_FETCH_EFLAGS(EFlags);
9791 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
9792
9793 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9794 IEM_MC_COMMIT_EFLAGS(EFlags);
9795 IEM_MC_ADVANCE_RIP();
9796 IEM_MC_END();
9797 }
9798 return VINF_SUCCESS;
9799
9800}
9801
9802
9803/** Opcode 0x63.
9804 * @note This is a weird one. It works like a regular move instruction if
9805 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
9806 * @todo This definitely needs a testcase to verify the odd cases. */
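 /* Encoding example: 48 63 c1 is 'movsxd rax, ecx' (REX.W + 0x63 /r); the
  no-REX.W variant acts as a plain 32-bit mov per the AMD note above and
  never reaches this worker (see the operand-size assertion below). */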
9807FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
9808{
9809 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
9810
9811 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
9812 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9813
9814 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9815 {
9816 /*
9817 * Register to register.
9818 */
9819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9820 IEM_MC_BEGIN(0, 1);
9821 IEM_MC_LOCAL(uint64_t, u64Value);
9822 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9823 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9824 IEM_MC_ADVANCE_RIP();
9825 IEM_MC_END();
9826 }
9827 else
9828 {
9829 /*
9830 * We're loading a register from memory.
9831 */
9832 IEM_MC_BEGIN(0, 2);
9833 IEM_MC_LOCAL(uint64_t, u64Value);
9834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9835 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9837 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9838 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9839 IEM_MC_ADVANCE_RIP();
9840 IEM_MC_END();
9841 }
9842 return VINF_SUCCESS;
9843}
9844
9845
9846/** Opcode 0x64. */
9847FNIEMOP_DEF(iemOp_seg_FS)
9848{
9849 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
9850 IEMOP_HLP_MIN_386();
9851
9852 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
9853 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
9854
9855 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9856 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9857}
9858
9859
9860/** Opcode 0x65. */
9861FNIEMOP_DEF(iemOp_seg_GS)
9862{
9863 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
9864 IEMOP_HLP_MIN_386();
9865
9866 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
9867 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
9868
9869 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9870 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9871}
9872
9873
9874/** Opcode 0x66. */
9875FNIEMOP_DEF(iemOp_op_size)
9876{
9877 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
9878 IEMOP_HLP_MIN_386();
9879
9880 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
9881 iemRecalEffOpSize(pVCpu);
9882
9883 /* For the 4-entry opcode tables, the operand prefix doesn't count
9884 when REPZ or REPNZ are present. */
9885 if (pVCpu->iem.s.idxPrefix == 0)
9886 pVCpu->iem.s.idxPrefix = 1;
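 /* E.g. for 'f3 66 0f bc /r' the 0xf3 will already have claimed the table
  column (idxPrefix presumably set to 2 by the repz decoder, cf. the
  strict check in iemOp_2byteEscape), so the 0x66 here only changes the
  operand size and the escape still dispatches to iemOp_tzcnt_Gv_Ev. */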
9887
9888 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9889 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9890}
9891
9892
9893/** Opcode 0x67. */
9894FNIEMOP_DEF(iemOp_addr_size)
9895{
9896 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
9897 IEMOP_HLP_MIN_386();
9898
9899 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
9900 switch (pVCpu->iem.s.enmDefAddrMode)
9901 {
9902 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9903 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
9904 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9905 default: AssertFailed();
9906 }
9907
9908 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9909 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9910}
9911
9912
9913/** Opcode 0x68. */
9914FNIEMOP_DEF(iemOp_push_Iz)
9915{
9916 IEMOP_MNEMONIC(push_Iz, "push Iz");
9917 IEMOP_HLP_MIN_186();
9918 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9919 switch (pVCpu->iem.s.enmEffOpSize)
9920 {
9921 case IEMMODE_16BIT:
9922 {
9923 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9925 IEM_MC_BEGIN(0,0);
9926 IEM_MC_PUSH_U16(u16Imm);
9927 IEM_MC_ADVANCE_RIP();
9928 IEM_MC_END();
9929 return VINF_SUCCESS;
9930 }
9931
9932 case IEMMODE_32BIT:
9933 {
9934 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9936 IEM_MC_BEGIN(0,0);
9937 IEM_MC_PUSH_U32(u32Imm);
9938 IEM_MC_ADVANCE_RIP();
9939 IEM_MC_END();
9940 return VINF_SUCCESS;
9941 }
9942
9943 case IEMMODE_64BIT:
9944 {
9945 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9947 IEM_MC_BEGIN(0,0);
9948 IEM_MC_PUSH_U64(u64Imm);
9949 IEM_MC_ADVANCE_RIP();
9950 IEM_MC_END();
9951 return VINF_SUCCESS;
9952 }
9953
9954 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9955 }
9956}
9957
9958
9959/** Opcode 0x69. */
9960FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
9961{
9962 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
9963 IEMOP_HLP_MIN_186();
9964 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9965 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9966
9967 switch (pVCpu->iem.s.enmEffOpSize)
9968 {
9969 case IEMMODE_16BIT:
9970 {
9971 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9972 {
9973 /* register operand */
9974 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9976
9977 IEM_MC_BEGIN(3, 1);
9978 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9979 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
9980 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9981 IEM_MC_LOCAL(uint16_t, u16Tmp);
9982
9983 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9984 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9985 IEM_MC_REF_EFLAGS(pEFlags);
9986 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9987 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9988
9989 IEM_MC_ADVANCE_RIP();
9990 IEM_MC_END();
9991 }
9992 else
9993 {
9994 /* memory operand */
9995 IEM_MC_BEGIN(3, 2);
9996 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9997 IEM_MC_ARG(uint16_t, u16Src, 1);
9998 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9999 IEM_MC_LOCAL(uint16_t, u16Tmp);
10000 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10001
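 /* The third argument to IEM_MC_CALC_RM_EFF_ADDR below is the number of
  immediate bytes still to be fetched (2 here); this matters for
  RIP-relative addressing, where the displacement is taken relative to
  the end of the whole instruction. */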
10002 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
10003 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10004 IEM_MC_ASSIGN(u16Src, u16Imm);
10005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10006 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10007 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
10008 IEM_MC_REF_EFLAGS(pEFlags);
10009 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
10010 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
10011
10012 IEM_MC_ADVANCE_RIP();
10013 IEM_MC_END();
10014 }
10015 return VINF_SUCCESS;
10016 }
10017
10018 case IEMMODE_32BIT:
10019 {
10020 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10021 {
10022 /* register operand */
10023 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10025
10026 IEM_MC_BEGIN(3, 1);
10027 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10028 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
10029 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10030 IEM_MC_LOCAL(uint32_t, u32Tmp);
10031
10032 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10033 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
10034 IEM_MC_REF_EFLAGS(pEFlags);
10035 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
10036 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
10037
10038 IEM_MC_ADVANCE_RIP();
10039 IEM_MC_END();
10040 }
10041 else
10042 {
10043 /* memory operand */
10044 IEM_MC_BEGIN(3, 2);
10045 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10046 IEM_MC_ARG(uint32_t, u32Src, 1);
10047 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10048 IEM_MC_LOCAL(uint32_t, u32Tmp);
10049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10050
10051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10052 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10053 IEM_MC_ASSIGN(u32Src, u32Imm);
10054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10055 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10056 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
10057 IEM_MC_REF_EFLAGS(pEFlags);
10058 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
10059 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
10060
10061 IEM_MC_ADVANCE_RIP();
10062 IEM_MC_END();
10063 }
10064 return VINF_SUCCESS;
10065 }
10066
10067 case IEMMODE_64BIT:
10068 {
10069 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10070 {
10071 /* register operand */
10072 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10074
10075 IEM_MC_BEGIN(3, 1);
10076 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10077 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
10078 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10079 IEM_MC_LOCAL(uint64_t, u64Tmp);
10080
10081 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10082 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10083 IEM_MC_REF_EFLAGS(pEFlags);
10084 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10085 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10086
10087 IEM_MC_ADVANCE_RIP();
10088 IEM_MC_END();
10089 }
10090 else
10091 {
10092 /* memory operand */
10093 IEM_MC_BEGIN(3, 2);
10094 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10095 IEM_MC_ARG(uint64_t, u64Src, 1);
10096 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10097 IEM_MC_LOCAL(uint64_t, u64Tmp);
10098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10099
10100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10101 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10102 IEM_MC_ASSIGN(u64Src, u64Imm);
10103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10104 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10105 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10106 IEM_MC_REF_EFLAGS(pEFlags);
10107 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10108 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10109
10110 IEM_MC_ADVANCE_RIP();
10111 IEM_MC_END();
10112 }
10113 return VINF_SUCCESS;
10114 }
10115 }
10116 AssertFailedReturn(VERR_IEM_IPE_9);
10117}
10118
10119
10120/** Opcode 0x6a. */
10121FNIEMOP_DEF(iemOp_push_Ib)
10122{
10123 IEMOP_MNEMONIC(push_Ib, "push Ib");
10124 IEMOP_HLP_MIN_186();
10125 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10127 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10128
10129 IEM_MC_BEGIN(0,0);
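 /* The int8_t immediate is widened to the effective operand size by the
  usual C integer conversions in the push MCs below, giving the
  architectural sign-extension for free. */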
10130 switch (pVCpu->iem.s.enmEffOpSize)
10131 {
10132 case IEMMODE_16BIT:
10133 IEM_MC_PUSH_U16(i8Imm);
10134 break;
10135 case IEMMODE_32BIT:
10136 IEM_MC_PUSH_U32(i8Imm);
10137 break;
10138 case IEMMODE_64BIT:
10139 IEM_MC_PUSH_U64(i8Imm);
10140 break;
10141 }
10142 IEM_MC_ADVANCE_RIP();
10143 IEM_MC_END();
10144 return VINF_SUCCESS;
10145}
10146
10147
10148/** Opcode 0x6b. */
10149FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
10150{
10151 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
10152 IEMOP_HLP_MIN_186();
10153 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10154 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10155
10156 switch (pVCpu->iem.s.enmEffOpSize)
10157 {
10158 case IEMMODE_16BIT:
10159 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10160 {
10161 /* register operand */
10162 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10164
10165 IEM_MC_BEGIN(3, 1);
10166 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10167 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
10168 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10169 IEM_MC_LOCAL(uint16_t, u16Tmp);
10170
10171 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10172 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
10173 IEM_MC_REF_EFLAGS(pEFlags);
10174 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
10175 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
10176
10177 IEM_MC_ADVANCE_RIP();
10178 IEM_MC_END();
10179 }
10180 else
10181 {
10182 /* memory operand */
10183 IEM_MC_BEGIN(3, 2);
10184 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10185 IEM_MC_ARG(uint16_t, u16Src, 1);
10186 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10187 IEM_MC_LOCAL(uint16_t, u16Tmp);
10188 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10189
10190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10191 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
10192 IEM_MC_ASSIGN(u16Src, u16Imm);
10193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10194 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10195 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
10196 IEM_MC_REF_EFLAGS(pEFlags);
10197 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
10198 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
10199
10200 IEM_MC_ADVANCE_RIP();
10201 IEM_MC_END();
10202 }
10203 return VINF_SUCCESS;
10204
10205 case IEMMODE_32BIT:
10206 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10207 {
10208 /* register operand */
10209 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10211
10212 IEM_MC_BEGIN(3, 1);
10213 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10214 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm, 1);
10215 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10216 IEM_MC_LOCAL(uint32_t, u32Tmp);
10217
10218 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10219 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
10220 IEM_MC_REF_EFLAGS(pEFlags);
10221 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
10222 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
10223
10224 IEM_MC_ADVANCE_RIP();
10225 IEM_MC_END();
10226 }
10227 else
10228 {
10229 /* memory operand */
10230 IEM_MC_BEGIN(3, 2);
10231 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10232 IEM_MC_ARG(uint32_t, u32Src, 1);
10233 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10234 IEM_MC_LOCAL(uint32_t, u32Tmp);
10235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10236
10237 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10238 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
10239 IEM_MC_ASSIGN(u32Src, u32Imm);
10240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10241 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10242 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
10243 IEM_MC_REF_EFLAGS(pEFlags);
10244 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
10245 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
10246
10247 IEM_MC_ADVANCE_RIP();
10248 IEM_MC_END();
10249 }
10250 return VINF_SUCCESS;
10251
10252 case IEMMODE_64BIT:
10253 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10254 {
10255 /* register operand */
10256 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10258
10259 IEM_MC_BEGIN(3, 1);
10260 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10261 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm, 1);
10262 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10263 IEM_MC_LOCAL(uint64_t, u64Tmp);
10264
10265 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10266 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10267 IEM_MC_REF_EFLAGS(pEFlags);
10268 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10269 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10270
10271 IEM_MC_ADVANCE_RIP();
10272 IEM_MC_END();
10273 }
10274 else
10275 {
10276 /* memory operand */
10277 IEM_MC_BEGIN(3, 2);
10278 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10279 IEM_MC_ARG(uint64_t, u64Src, 1);
10280 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10281 IEM_MC_LOCAL(uint64_t, u64Tmp);
10282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10283
10284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10285 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
10286 IEM_MC_ASSIGN(u64Src, u64Imm);
10287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10288 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10289 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10290 IEM_MC_REF_EFLAGS(pEFlags);
10291 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10292 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10293
10294 IEM_MC_ADVANCE_RIP();
10295 IEM_MC_END();
10296 }
10297 return VINF_SUCCESS;
10298 }
10299 AssertFailedReturn(VERR_IEM_IPE_8);
10300}
10301
10302
10303/** Opcode 0x6c. */
10304FNIEMOP_DEF(iemOp_insb_Yb_DX)
10305{
10306 IEMOP_HLP_MIN_186();
10307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
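 /* String I/O needs REP iteration, segment and I/O-permission checking, so all variants are deferred to C implementations. */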
10308 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10309 {
10310 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
10311 switch (pVCpu->iem.s.enmEffAddrMode)
10312 {
10313 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
10314 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
10315 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
10316 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10317 }
10318 }
10319 else
10320 {
10321 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
10322 switch (pVCpu->iem.s.enmEffAddrMode)
10323 {
10324 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
10325 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
10326 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
10327 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10328 }
10329 }
10330}
10331
10332
10333/** Opcode 0x6d. */
10334FNIEMOP_DEF(iemOp_inswd_Yv_DX)
10335{
10336 IEMOP_HLP_MIN_186();
10337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10338 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
10339 {
10340 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
10341 switch (pVCpu->iem.s.enmEffOpSize)
10342 {
10343 case IEMMODE_16BIT:
10344 switch (pVCpu->iem.s.enmEffAddrMode)
10345 {
10346 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
10347 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
10348 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
10349 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10350 }
10351 break;
10352 case IEMMODE_64BIT:
10353 case IEMMODE_32BIT:
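 /* There is no 64-bit operand size for INS; a 64-bit operand size is handled as 32-bit. */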
10354 switch (pVCpu->iem.s.enmEffAddrMode)
10355 {
10356 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
10357 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
10358 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
10359 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10360 }
10361 break;
10362 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10363 }
10364 }
10365 else
10366 {
10367 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
10368 switch (pVCpu->iem.s.enmEffOpSize)
10369 {
10370 case IEMMODE_16BIT:
10371 switch (pVCpu->iem.s.enmEffAddrMode)
10372 {
10373 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
10374 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
10375 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
10376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10377 }
10378 break;
10379 case IEMMODE_64BIT:
10380 case IEMMODE_32BIT:
10381 switch (pVCpu->iem.s.enmEffAddrMode)
10382 {
10383 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
10384 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
10385 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
10386 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10387 }
10388 break;
10389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10390 }
10391 }
10392}
10393
10394
10395/** Opcode 0x6e. */
10396FNIEMOP_DEF(iemOp_outsb_Yb_DX)
10397{
10398 IEMOP_HLP_MIN_186();
10399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10400 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10401 {
10402 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
10403 switch (pVCpu->iem.s.enmEffAddrMode)
10404 {
10405 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
10406 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
10407 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
10408 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10409 }
10410 }
10411 else
10412 {
10413 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
10414 switch (pVCpu->iem.s.enmEffAddrMode)
10415 {
10416 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
10417 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
10418 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
10419 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10420 }
10421 }
10422}
10423
10424
10425/** Opcode 0x6f. */
10426FNIEMOP_DEF(iemOp_outswd_Yv_DX)
10427{
10428 IEMOP_HLP_MIN_186();
10429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10430 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
10431 {
10432 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
10433 switch (pVCpu->iem.s.enmEffOpSize)
10434 {
10435 case IEMMODE_16BIT:
10436 switch (pVCpu->iem.s.enmEffAddrMode)
10437 {
10438 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
10439 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
10440 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
10441 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10442 }
10443 break;
10444 case IEMMODE_64BIT:
10445 case IEMMODE_32BIT:
10446 switch (pVCpu->iem.s.enmEffAddrMode)
10447 {
10448 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
10449 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
10450 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
10451 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10452 }
10453 break;
10454 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10455 }
10456 }
10457 else
10458 {
10459 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
10460 switch (pVCpu->iem.s.enmEffOpSize)
10461 {
10462 case IEMMODE_16BIT:
10463 switch (pVCpu->iem.s.enmEffAddrMode)
10464 {
10465 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
10466 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
10467 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
10468 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10469 }
10470 break;
10471 case IEMMODE_64BIT:
10472 case IEMMODE_32BIT:
10473 switch (pVCpu->iem.s.enmEffAddrMode)
10474 {
10475 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
10476 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
10477 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
10478 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10479 }
10480 break;
10481 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10482 }
10483 }
10484}
10485
10486
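/*
 * The conditional jump (Jcc Jb) instructions for opcodes 0x70 thru 0x7f
 * below all follow the same pattern: fetch the signed 8-bit displacement,
 * test the relevant EFLAGS condition, and either take the relative jump
 * or simply advance RIP.
 */
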
10487/** Opcode 0x70. */
10488FNIEMOP_DEF(iemOp_jo_Jb)
10489{
10490 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
10491 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10493 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10494
10495 IEM_MC_BEGIN(0, 0);
10496 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
10497 IEM_MC_REL_JMP_S8(i8Imm);
10498 } IEM_MC_ELSE() {
10499 IEM_MC_ADVANCE_RIP();
10500 } IEM_MC_ENDIF();
10501 IEM_MC_END();
10502 return VINF_SUCCESS;
10503}
10504
10505
10506/** Opcode 0x71. */
10507FNIEMOP_DEF(iemOp_jno_Jb)
10508{
10509 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
10510 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10512 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10513
10514 IEM_MC_BEGIN(0, 0);
10515 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
10516 IEM_MC_ADVANCE_RIP();
10517 } IEM_MC_ELSE() {
10518 IEM_MC_REL_JMP_S8(i8Imm);
10519 } IEM_MC_ENDIF();
10520 IEM_MC_END();
10521 return VINF_SUCCESS;
10522}
10523

10524/** Opcode 0x72. */
10525FNIEMOP_DEF(iemOp_jc_Jb)
10526{
10527 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
10528 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10530 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10531
10532 IEM_MC_BEGIN(0, 0);
10533 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10534 IEM_MC_REL_JMP_S8(i8Imm);
10535 } IEM_MC_ELSE() {
10536 IEM_MC_ADVANCE_RIP();
10537 } IEM_MC_ENDIF();
10538 IEM_MC_END();
10539 return VINF_SUCCESS;
10540}
10541
10542
10543/** Opcode 0x73. */
10544FNIEMOP_DEF(iemOp_jnc_Jb)
10545{
10546 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
10547 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10549 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10550
10551 IEM_MC_BEGIN(0, 0);
10552 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10553 IEM_MC_ADVANCE_RIP();
10554 } IEM_MC_ELSE() {
10555 IEM_MC_REL_JMP_S8(i8Imm);
10556 } IEM_MC_ENDIF();
10557 IEM_MC_END();
10558 return VINF_SUCCESS;
10559}
10560
10561
10562/** Opcode 0x74. */
10563FNIEMOP_DEF(iemOp_je_Jb)
10564{
10565 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
10566 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10568 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10569
10570 IEM_MC_BEGIN(0, 0);
10571 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10572 IEM_MC_REL_JMP_S8(i8Imm);
10573 } IEM_MC_ELSE() {
10574 IEM_MC_ADVANCE_RIP();
10575 } IEM_MC_ENDIF();
10576 IEM_MC_END();
10577 return VINF_SUCCESS;
10578}
10579
10580
10581/** Opcode 0x75. */
10582FNIEMOP_DEF(iemOp_jne_Jb)
10583{
10584 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
10585 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10587 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10588
10589 IEM_MC_BEGIN(0, 0);
10590 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10591 IEM_MC_ADVANCE_RIP();
10592 } IEM_MC_ELSE() {
10593 IEM_MC_REL_JMP_S8(i8Imm);
10594 } IEM_MC_ENDIF();
10595 IEM_MC_END();
10596 return VINF_SUCCESS;
10597}
10598
10599
10600/** Opcode 0x76. */
10601FNIEMOP_DEF(iemOp_jbe_Jb)
10602{
10603 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
10604 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10606 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10607
10608 IEM_MC_BEGIN(0, 0);
10609 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10610 IEM_MC_REL_JMP_S8(i8Imm);
10611 } IEM_MC_ELSE() {
10612 IEM_MC_ADVANCE_RIP();
10613 } IEM_MC_ENDIF();
10614 IEM_MC_END();
10615 return VINF_SUCCESS;
10616}
10617
10618
10619/** Opcode 0x77. */
10620FNIEMOP_DEF(iemOp_jnbe_Jb)
10621{
10622 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
10623 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10625 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10626
10627 IEM_MC_BEGIN(0, 0);
10628 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10629 IEM_MC_ADVANCE_RIP();
10630 } IEM_MC_ELSE() {
10631 IEM_MC_REL_JMP_S8(i8Imm);
10632 } IEM_MC_ENDIF();
10633 IEM_MC_END();
10634 return VINF_SUCCESS;
10635}
10636
10637
10638/** Opcode 0x78. */
10639FNIEMOP_DEF(iemOp_js_Jb)
10640{
10641 IEMOP_MNEMONIC(js_Jb, "js Jb");
10642 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10644 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10645
10646 IEM_MC_BEGIN(0, 0);
10647 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
10648 IEM_MC_REL_JMP_S8(i8Imm);
10649 } IEM_MC_ELSE() {
10650 IEM_MC_ADVANCE_RIP();
10651 } IEM_MC_ENDIF();
10652 IEM_MC_END();
10653 return VINF_SUCCESS;
10654}
10655
10656
10657/** Opcode 0x79. */
10658FNIEMOP_DEF(iemOp_jns_Jb)
10659{
10660 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
10661 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10663 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10664
10665 IEM_MC_BEGIN(0, 0);
10666 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
10667 IEM_MC_ADVANCE_RIP();
10668 } IEM_MC_ELSE() {
10669 IEM_MC_REL_JMP_S8(i8Imm);
10670 } IEM_MC_ENDIF();
10671 IEM_MC_END();
10672 return VINF_SUCCESS;
10673}
10674
10675
10676/** Opcode 0x7a. */
10677FNIEMOP_DEF(iemOp_jp_Jb)
10678{
10679 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
10680 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10682 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10683
10684 IEM_MC_BEGIN(0, 0);
10685 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10686 IEM_MC_REL_JMP_S8(i8Imm);
10687 } IEM_MC_ELSE() {
10688 IEM_MC_ADVANCE_RIP();
10689 } IEM_MC_ENDIF();
10690 IEM_MC_END();
10691 return VINF_SUCCESS;
10692}
10693
10694
10695/** Opcode 0x7b. */
10696FNIEMOP_DEF(iemOp_jnp_Jb)
10697{
10698 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
10699 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10701 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10702
10703 IEM_MC_BEGIN(0, 0);
10704 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10705 IEM_MC_ADVANCE_RIP();
10706 } IEM_MC_ELSE() {
10707 IEM_MC_REL_JMP_S8(i8Imm);
10708 } IEM_MC_ENDIF();
10709 IEM_MC_END();
10710 return VINF_SUCCESS;
10711}
10712
10713
10714/** Opcode 0x7c. */
10715FNIEMOP_DEF(iemOp_jl_Jb)
10716{
10717 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
10718 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10720 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10721
10722 IEM_MC_BEGIN(0, 0);
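 /* Jumps when SF != OF, i.e. signed less than. */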
10723 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
10724 IEM_MC_REL_JMP_S8(i8Imm);
10725 } IEM_MC_ELSE() {
10726 IEM_MC_ADVANCE_RIP();
10727 } IEM_MC_ENDIF();
10728 IEM_MC_END();
10729 return VINF_SUCCESS;
10730}
10731
10732
10733/** Opcode 0x7d. */
10734FNIEMOP_DEF(iemOp_jnl_Jb)
10735{
10736 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
10737 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10739 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10740
10741 IEM_MC_BEGIN(0, 0);
10742 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
10743 IEM_MC_ADVANCE_RIP();
10744 } IEM_MC_ELSE() {
10745 IEM_MC_REL_JMP_S8(i8Imm);
10746 } IEM_MC_ENDIF();
10747 IEM_MC_END();
10748 return VINF_SUCCESS;
10749}
10750
10751
10752/** Opcode 0x7e. */
10753FNIEMOP_DEF(iemOp_jle_Jb)
10754{
10755 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
10756 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10758 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10759
10760 IEM_MC_BEGIN(0, 0);
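 /* Jumps when ZF is set or SF != OF, i.e. signed less than or equal. */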
10761 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
10762 IEM_MC_REL_JMP_S8(i8Imm);
10763 } IEM_MC_ELSE() {
10764 IEM_MC_ADVANCE_RIP();
10765 } IEM_MC_ENDIF();
10766 IEM_MC_END();
10767 return VINF_SUCCESS;
10768}
10769
10770
10771/** Opcode 0x7f. */
10772FNIEMOP_DEF(iemOp_jnle_Jb)
10773{
10774 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
10775 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10777 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10778
10779 IEM_MC_BEGIN(0, 0);
10780 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
10781 IEM_MC_ADVANCE_RIP();
10782 } IEM_MC_ELSE() {
10783 IEM_MC_REL_JMP_S8(i8Imm);
10784 } IEM_MC_ENDIF();
10785 IEM_MC_END();
10786 return VINF_SUCCESS;
10787}
10788
10789
10790/** Opcode 0x80. */
10791FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
10792{
10793 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10794 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10795 {
10796 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
10797 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
10798 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
10799 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
10800 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
10801 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
10802 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
10803 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
10804 }
10805 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10806
10807 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10808 {
10809 /* register target */
10810 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10812 IEM_MC_BEGIN(3, 0);
10813 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10814 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
10815 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10816
10817 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10818 IEM_MC_REF_EFLAGS(pEFlags);
10819 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
10820
10821 IEM_MC_ADVANCE_RIP();
10822 IEM_MC_END();
10823 }
10824 else
10825 {
10826 /* memory target */
10827 uint32_t fAccess;
10828 if (pImpl->pfnLockedU8)
10829 fAccess = IEM_ACCESS_DATA_RW;
10830 else /* CMP */
10831 fAccess = IEM_ACCESS_DATA_R;
10832 IEM_MC_BEGIN(3, 2);
10833 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10834 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10835 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10836
10837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10838 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10839 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
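 /* CMP has no locked variant, so for it a LOCK prefix must raise #UD; the other group members accept it. */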
10840 if (pImpl->pfnLockedU8)
10841 IEMOP_HLP_DONE_DECODING();
10842 else
10843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10844
10845 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10846 IEM_MC_FETCH_EFLAGS(EFlags);
10847 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10848 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
10849 else
10850 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
10851
10852 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
10853 IEM_MC_COMMIT_EFLAGS(EFlags);
10854 IEM_MC_ADVANCE_RIP();
10855 IEM_MC_END();
10856 }
10857 return VINF_SUCCESS;
10858}
10859
10860
10861/** Opcode 0x81. */
10862FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
10863{
10864 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10865 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10866 {
10867 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
10868 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
10869 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
10870 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
10871 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
10872 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
10873 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
10874 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
10875 }
10876 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10877
10878 switch (pVCpu->iem.s.enmEffOpSize)
10879 {
10880 case IEMMODE_16BIT:
10881 {
10882 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10883 {
10884 /* register target */
10885 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10887 IEM_MC_BEGIN(3, 0);
10888 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10889 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
10890 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10891
10892 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10893 IEM_MC_REF_EFLAGS(pEFlags);
10894 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10895
10896 IEM_MC_ADVANCE_RIP();
10897 IEM_MC_END();
10898 }
10899 else
10900 {
10901 /* memory target */
10902 uint32_t fAccess;
10903 if (pImpl->pfnLockedU16)
10904 fAccess = IEM_ACCESS_DATA_RW;
10905 else /* CMP */
10906 fAccess = IEM_ACCESS_DATA_R;
10907 IEM_MC_BEGIN(3, 2);
10908 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10909 IEM_MC_ARG(uint16_t, u16Src, 1);
10910 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10911 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10912
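 /* The effective address must be calculated first, as the ModRM displacement bytes precede the immediate in the instruction stream. */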
10913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
10914 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10915 IEM_MC_ASSIGN(u16Src, u16Imm);
10916 if (pImpl->pfnLockedU16)
10917 IEMOP_HLP_DONE_DECODING();
10918 else
10919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10920 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10921 IEM_MC_FETCH_EFLAGS(EFlags);
10922 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10923 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10924 else
10925 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10926
10927 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10928 IEM_MC_COMMIT_EFLAGS(EFlags);
10929 IEM_MC_ADVANCE_RIP();
10930 IEM_MC_END();
10931 }
10932 break;
10933 }
10934
10935 case IEMMODE_32BIT:
10936 {
10937 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10938 {
10939 /* register target */
10940 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10942 IEM_MC_BEGIN(3, 0);
10943 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10944 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
10945 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10946
10947 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10948 IEM_MC_REF_EFLAGS(pEFlags);
10949 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10950 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10951
10952 IEM_MC_ADVANCE_RIP();
10953 IEM_MC_END();
10954 }
10955 else
10956 {
10957 /* memory target */
10958 uint32_t fAccess;
10959 if (pImpl->pfnLockedU32)
10960 fAccess = IEM_ACCESS_DATA_RW;
10961 else /* CMP */
10962 fAccess = IEM_ACCESS_DATA_R;
10963 IEM_MC_BEGIN(3, 2);
10964 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10965 IEM_MC_ARG(uint32_t, u32Src, 1);
10966 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10967 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10968
10969 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10970 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10971 IEM_MC_ASSIGN(u32Src, u32Imm);
10972 if (pImpl->pfnLockedU32)
10973 IEMOP_HLP_DONE_DECODING();
10974 else
10975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10976 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10977 IEM_MC_FETCH_EFLAGS(EFlags);
10978 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10979 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10980 else
10981 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10982
10983 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10984 IEM_MC_COMMIT_EFLAGS(EFlags);
10985 IEM_MC_ADVANCE_RIP();
10986 IEM_MC_END();
10987 }
10988 break;
10989 }
10990
10991 case IEMMODE_64BIT:
10992 {
10993 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10994 {
10995 /* register target */
10996 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
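 /* In 64-bit mode Iz is a 32-bit immediate that is sign-extended to 64 bits. */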
10997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10998 IEM_MC_BEGIN(3, 0);
10999 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11000 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
11001 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11002
11003 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11004 IEM_MC_REF_EFLAGS(pEFlags);
11005 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11006
11007 IEM_MC_ADVANCE_RIP();
11008 IEM_MC_END();
11009 }
11010 else
11011 {
11012 /* memory target */
11013 uint32_t fAccess;
11014 if (pImpl->pfnLockedU64)
11015 fAccess = IEM_ACCESS_DATA_RW;
11016 else /* CMP */
11017 fAccess = IEM_ACCESS_DATA_R;
11018 IEM_MC_BEGIN(3, 2);
11019 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11020 IEM_MC_ARG(uint64_t, u64Src, 1);
11021 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11022 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11023
11024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
11025 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
11026 if (pImpl->pfnLockedU64)
11027 IEMOP_HLP_DONE_DECODING();
11028 else
11029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11030 IEM_MC_ASSIGN(u64Src, u64Imm);
11031 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11032 IEM_MC_FETCH_EFLAGS(EFlags);
11033 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11034 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11035 else
11036 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
11037
11038 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
11039 IEM_MC_COMMIT_EFLAGS(EFlags);
11040 IEM_MC_ADVANCE_RIP();
11041 IEM_MC_END();
11042 }
11043 break;
11044 }
11045 }
11046 return VINF_SUCCESS;
11047}
11048
11049
11050/** Opcode 0x82. */
11051FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
11052{
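 /* Opcode 0x82 is an alias of 0x80 (Grp1 Eb,Ib) that is invalid in 64-bit mode. */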
11053 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
11054 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
11055}
11056
11057
11058/** Opcode 0x83. */
11059FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
11060{
11061 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11062 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11063 {
11064 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
11065 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
11066 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
11067 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
11068 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
11069 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
11070 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
11071 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
11072 }
11073 /* Note! The OR, AND and XOR instructions seem to be present on CPUs prior
11074 to the 386 even though they are absent from the Intel reference manuals
11075 and some 3rd party opcode listings. */
11076 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
11077
11078 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11079 {
11080 /*
11081 * Register target
11082 */
11083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11084 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
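 /* The Ib immediate is sign-extended to the effective operand size via the (int8_t) casts below. */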
11085 switch (pVCpu->iem.s.enmEffOpSize)
11086 {
11087 case IEMMODE_16BIT:
11088 {
11089 IEM_MC_BEGIN(3, 0);
11090 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11091 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm, 1);
11092 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11093
11094 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11095 IEM_MC_REF_EFLAGS(pEFlags);
11096 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11097
11098 IEM_MC_ADVANCE_RIP();
11099 IEM_MC_END();
11100 break;
11101 }
11102
11103 case IEMMODE_32BIT:
11104 {
11105 IEM_MC_BEGIN(3, 0);
11106 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11107 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm, 1);
11108 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11109
11110 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11111 IEM_MC_REF_EFLAGS(pEFlags);
11112 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11113 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11114
11115 IEM_MC_ADVANCE_RIP();
11116 IEM_MC_END();
11117 break;
11118 }
11119
11120 case IEMMODE_64BIT:
11121 {
11122 IEM_MC_BEGIN(3, 0);
11123 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11124 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm, 1);
11125 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11126
11127 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11128 IEM_MC_REF_EFLAGS(pEFlags);
11129 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11130
11131 IEM_MC_ADVANCE_RIP();
11132 IEM_MC_END();
11133 break;
11134 }
11135 }
11136 }
11137 else
11138 {
11139 /*
11140 * Memory target.
11141 */
11142 uint32_t fAccess;
11143 if (pImpl->pfnLockedU16)
11144 fAccess = IEM_ACCESS_DATA_RW;
11145 else /* CMP */
11146 fAccess = IEM_ACCESS_DATA_R;
11147
11148 switch (pVCpu->iem.s.enmEffOpSize)
11149 {
11150 case IEMMODE_16BIT:
11151 {
11152 IEM_MC_BEGIN(3, 2);
11153 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11154 IEM_MC_ARG(uint16_t, u16Src, 1);
11155 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11157
11158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
11159 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11160 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
11161 if (pImpl->pfnLockedU16)
11162 IEMOP_HLP_DONE_DECODING();
11163 else
11164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11165 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11166 IEM_MC_FETCH_EFLAGS(EFlags);
11167 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11168 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11169 else
11170 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
11171
11172 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
11173 IEM_MC_COMMIT_EFLAGS(EFlags);
11174 IEM_MC_ADVANCE_RIP();
11175 IEM_MC_END();
11176 break;
11177 }
11178
11179 case IEMMODE_32BIT:
11180 {
11181 IEM_MC_BEGIN(3, 2);
11182 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11183 IEM_MC_ARG(uint32_t, u32Src, 1);
11184 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11186
11187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
11188 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11189 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
11190 if (pImpl->pfnLockedU32)
11191 IEMOP_HLP_DONE_DECODING();
11192 else
11193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11194 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11195 IEM_MC_FETCH_EFLAGS(EFlags);
11196 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11197 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11198 else
11199 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
11200
11201 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
11202 IEM_MC_COMMIT_EFLAGS(EFlags);
11203 IEM_MC_ADVANCE_RIP();
11204 IEM_MC_END();
11205 break;
11206 }
11207
11208 case IEMMODE_64BIT:
11209 {
11210 IEM_MC_BEGIN(3, 2);
11211 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11212 IEM_MC_ARG(uint64_t, u64Src, 1);
11213 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11214 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11215
11216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
11217 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11218 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
11219 if (pImpl->pfnLockedU64)
11220 IEMOP_HLP_DONE_DECODING();
11221 else
11222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11223 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11224 IEM_MC_FETCH_EFLAGS(EFlags);
11225 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11226 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11227 else
11228 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
11229
11230 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
11231 IEM_MC_COMMIT_EFLAGS(EFlags);
11232 IEM_MC_ADVANCE_RIP();
11233 IEM_MC_END();
11234 break;
11235 }
11236 }
11237 }
11238 return VINF_SUCCESS;
11239}
11240
11241
11242/** Opcode 0x84. */
11243FNIEMOP_DEF(iemOp_test_Eb_Gb)
11244{
11245 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
11246 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11247 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
11248}
11249
11250
11251/** Opcode 0x85. */
11252FNIEMOP_DEF(iemOp_test_Ev_Gv)
11253{
11254 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
11255 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11256 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
11257}
11258
11259
11260/** Opcode 0x86. */
11261FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
11262{
11263 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11264 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
11265
11266 /*
11267 * If rm is denoting a register, no more instruction bytes.
11268 */
11269 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11270 {
11271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11272
11273 IEM_MC_BEGIN(0, 2);
11274 IEM_MC_LOCAL(uint8_t, uTmp1);
11275 IEM_MC_LOCAL(uint8_t, uTmp2);
11276
11277 IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11278 IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11279 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11280 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11281
11282 IEM_MC_ADVANCE_RIP();
11283 IEM_MC_END();
11284 }
11285 else
11286 {
11287 /*
11288 * We're accessing memory.
11289 */
11290/** @todo the register must be committed separately! */
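/* Note: the memory form of xchg is implicitly locked on real hardware, with or without a LOCK prefix. */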
11291 IEM_MC_BEGIN(2, 2);
11292 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
11293 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11294 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11295
11296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11297 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11298 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11299 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
11300 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
11301
11302 IEM_MC_ADVANCE_RIP();
11303 IEM_MC_END();
11304 }
11305 return VINF_SUCCESS;
11306}
11307
11308
11309/** Opcode 0x87. */
11310FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
11311{
11312 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
11313 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11314
11315 /*
11316 * If rm is denoting a register, no more instruction bytes.
11317 */
11318 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11319 {
11320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11321
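 /* Plain register swap via two temporaries; xchg modifies no flags. */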
11322 switch (pVCpu->iem.s.enmEffOpSize)
11323 {
11324 case IEMMODE_16BIT:
11325 IEM_MC_BEGIN(0, 2);
11326 IEM_MC_LOCAL(uint16_t, uTmp1);
11327 IEM_MC_LOCAL(uint16_t, uTmp2);
11328
11329 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11330 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11331 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11332 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11333
11334 IEM_MC_ADVANCE_RIP();
11335 IEM_MC_END();
11336 return VINF_SUCCESS;
11337
11338 case IEMMODE_32BIT:
11339 IEM_MC_BEGIN(0, 2);
11340 IEM_MC_LOCAL(uint32_t, uTmp1);
11341 IEM_MC_LOCAL(uint32_t, uTmp2);
11342
11343 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11344 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11345 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11346 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11347
11348 IEM_MC_ADVANCE_RIP();
11349 IEM_MC_END();
11350 return VINF_SUCCESS;
11351
11352 case IEMMODE_64BIT:
11353 IEM_MC_BEGIN(0, 2);
11354 IEM_MC_LOCAL(uint64_t, uTmp1);
11355 IEM_MC_LOCAL(uint64_t, uTmp2);
11356
11357 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11358 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11359 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11360 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11361
11362 IEM_MC_ADVANCE_RIP();
11363 IEM_MC_END();
11364 return VINF_SUCCESS;
11365
11366 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11367 }
11368 }
11369 else
11370 {
11371 /*
11372 * We're accessing memory.
11373 */
11374 switch (pVCpu->iem.s.enmEffOpSize)
11375 {
11376/** @todo the register must be committed separately! */
11377 case IEMMODE_16BIT:
11378 IEM_MC_BEGIN(2, 2);
11379 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
11380 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11382
11383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11384 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11385 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11386 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
11387 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
11388
11389 IEM_MC_ADVANCE_RIP();
11390 IEM_MC_END();
11391 return VINF_SUCCESS;
11392
11393 case IEMMODE_32BIT:
11394 IEM_MC_BEGIN(2, 2);
11395 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
11396 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11398
11399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11400 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11401 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11402 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
11403 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
11404
11405 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
11406 IEM_MC_ADVANCE_RIP();
11407 IEM_MC_END();
11408 return VINF_SUCCESS;
11409
11410 case IEMMODE_64BIT:
11411 IEM_MC_BEGIN(2, 2);
11412 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
11413 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11415
11416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11417 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11418 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11419 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
11420 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
11421
11422 IEM_MC_ADVANCE_RIP();
11423 IEM_MC_END();
11424 return VINF_SUCCESS;
11425
11426 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11427 }
11428 }
11429}
11430
11431
11432/** Opcode 0x88. */
11433FNIEMOP_DEF(iemOp_mov_Eb_Gb)
11434{
11435 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
11436
11437 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11439
11440 /*
11441 * If rm is denoting a register, no more instruction bytes.
11442 */
11443 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11444 {
11445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11446 IEM_MC_BEGIN(0, 1);
11447 IEM_MC_LOCAL(uint8_t, u8Value);
11448 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11449 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
11450 IEM_MC_ADVANCE_RIP();
11451 IEM_MC_END();
11452 }
11453 else
11454 {
11455 /*
11456 * We're writing a register to memory.
11457 */
11458 IEM_MC_BEGIN(0, 2);
11459 IEM_MC_LOCAL(uint8_t, u8Value);
11460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11461 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11463 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11464 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
11465 IEM_MC_ADVANCE_RIP();
11466 IEM_MC_END();
11467 }
11468 return VINF_SUCCESS;
11470}
11471
11472
11473/** Opcode 0x89. */
11474FNIEMOP_DEF(iemOp_mov_Ev_Gv)
11475{
11476 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
11477
11478 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11479
11480 /*
11481 * If rm is denoting a register, no more instruction bytes.
11482 */
11483 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11484 {
11485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11486 switch (pVCpu->iem.s.enmEffOpSize)
11487 {
11488 case IEMMODE_16BIT:
11489 IEM_MC_BEGIN(0, 1);
11490 IEM_MC_LOCAL(uint16_t, u16Value);
11491 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11492 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
11493 IEM_MC_ADVANCE_RIP();
11494 IEM_MC_END();
11495 break;
11496
11497 case IEMMODE_32BIT:
11498 IEM_MC_BEGIN(0, 1);
11499 IEM_MC_LOCAL(uint32_t, u32Value);
11500 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11501 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
11502 IEM_MC_ADVANCE_RIP();
11503 IEM_MC_END();
11504 break;
11505
11506 case IEMMODE_64BIT:
11507 IEM_MC_BEGIN(0, 1);
11508 IEM_MC_LOCAL(uint64_t, u64Value);
11509 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11510 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
11511 IEM_MC_ADVANCE_RIP();
11512 IEM_MC_END();
11513 break;
11514 }
11515 }
11516 else
11517 {
11518 /*
11519 * We're writing a register to memory.
11520 */
11521 switch (pVCpu->iem.s.enmEffOpSize)
11522 {
11523 case IEMMODE_16BIT:
11524 IEM_MC_BEGIN(0, 2);
11525 IEM_MC_LOCAL(uint16_t, u16Value);
11526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11529 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11530 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
11531 IEM_MC_ADVANCE_RIP();
11532 IEM_MC_END();
11533 break;
11534
11535 case IEMMODE_32BIT:
11536 IEM_MC_BEGIN(0, 2);
11537 IEM_MC_LOCAL(uint32_t, u32Value);
11538 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11541 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11542 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11543 IEM_MC_ADVANCE_RIP();
11544 IEM_MC_END();
11545 break;
11546
11547 case IEMMODE_64BIT:
11548 IEM_MC_BEGIN(0, 2);
11549 IEM_MC_LOCAL(uint64_t, u64Value);
11550 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11551 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11553 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11554 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11555 IEM_MC_ADVANCE_RIP();
11556 IEM_MC_END();
11557 break;
11558 }
11559 }
11560 return VINF_SUCCESS;
11561}
11562
11563
11564/** Opcode 0x8a. */
11565FNIEMOP_DEF(iemOp_mov_Gb_Eb)
11566{
11567 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
11568
11569 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11570
11571 /*
11572 * If rm is denoting a register, no more instruction bytes.
11573 */
11574 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11575 {
11576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11577 IEM_MC_BEGIN(0, 1);
11578 IEM_MC_LOCAL(uint8_t, u8Value);
11579 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11580 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
11581 IEM_MC_ADVANCE_RIP();
11582 IEM_MC_END();
11583 }
11584 else
11585 {
11586 /*
11587 * We're loading a register from memory.
11588 */
11589 IEM_MC_BEGIN(0, 2);
11590 IEM_MC_LOCAL(uint8_t, u8Value);
11591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11594 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11595 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
11596 IEM_MC_ADVANCE_RIP();
11597 IEM_MC_END();
11598 }
11599 return VINF_SUCCESS;
11600}
11601
11602
11603/** Opcode 0x8b. */
11604FNIEMOP_DEF(iemOp_mov_Gv_Ev)
11605{
11606 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
11607
11608 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11609
11610 /*
11611 * If rm is denoting a register, no more instruction bytes.
11612 */
11613 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11614 {
11615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11616 switch (pVCpu->iem.s.enmEffOpSize)
11617 {
11618 case IEMMODE_16BIT:
11619 IEM_MC_BEGIN(0, 1);
11620 IEM_MC_LOCAL(uint16_t, u16Value);
11621 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11622 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
11623 IEM_MC_ADVANCE_RIP();
11624 IEM_MC_END();
11625 break;
11626
11627 case IEMMODE_32BIT:
11628 IEM_MC_BEGIN(0, 1);
11629 IEM_MC_LOCAL(uint32_t, u32Value);
11630 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11631 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
11632 IEM_MC_ADVANCE_RIP();
11633 IEM_MC_END();
11634 break;
11635
11636 case IEMMODE_64BIT:
11637 IEM_MC_BEGIN(0, 1);
11638 IEM_MC_LOCAL(uint64_t, u64Value);
11639 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11640 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
11641 IEM_MC_ADVANCE_RIP();
11642 IEM_MC_END();
11643 break;
11644 }
11645 }
11646 else
11647 {
11648 /*
11649 * We're loading a register from memory.
11650 */
11651 switch (pVCpu->iem.s.enmEffOpSize)
11652 {
11653 case IEMMODE_16BIT:
11654 IEM_MC_BEGIN(0, 2);
11655 IEM_MC_LOCAL(uint16_t, u16Value);
11656 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11657 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11659 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11660 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
11661 IEM_MC_ADVANCE_RIP();
11662 IEM_MC_END();
11663 break;
11664
11665 case IEMMODE_32BIT:
11666 IEM_MC_BEGIN(0, 2);
11667 IEM_MC_LOCAL(uint32_t, u32Value);
11668 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11671 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11672 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
11673 IEM_MC_ADVANCE_RIP();
11674 IEM_MC_END();
11675 break;
11676
11677 case IEMMODE_64BIT:
11678 IEM_MC_BEGIN(0, 2);
11679 IEM_MC_LOCAL(uint64_t, u64Value);
11680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11681 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11683 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11684 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
11685 IEM_MC_ADVANCE_RIP();
11686 IEM_MC_END();
11687 break;
11688 }
11689 }
11690 return VINF_SUCCESS;
11691}
11692
11693
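/* Note: opcode 0x63 is handled down here, presumably so that the forwarding calls below can refer to opcode handlers already defined above. */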
11694/** Opcode 0x63. */
11695FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
11696{
11697 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
11698 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
11699 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11700 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
11701 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
11702}
11703
11704
11705/** Opcode 0x8c. */
11706FNIEMOP_DEF(iemOp_mov_Ev_Sw)
11707{
11708 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
11709
11710 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11711
11712 /*
11713 * Check that the destination register exists. The REX.R prefix is ignored.
11714 */
11715 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
11716 if (iSegReg > X86_SREG_GS)
11717 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
11718
11719 /*
11720 * If rm is denoting a register, no more instruction bytes.
11721 * In that case, the operand size is respected and the upper bits are
11722 * cleared (starting with some Pentium CPUs).
11723 */
11724 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11725 {
11726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11727 switch (pVCpu->iem.s.enmEffOpSize)
11728 {
11729 case IEMMODE_16BIT:
11730 IEM_MC_BEGIN(0, 1);
11731 IEM_MC_LOCAL(uint16_t, u16Value);
11732 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
11733 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
11734 IEM_MC_ADVANCE_RIP();
11735 IEM_MC_END();
11736 break;
11737
11738 case IEMMODE_32BIT:
11739 IEM_MC_BEGIN(0, 1);
11740 IEM_MC_LOCAL(uint32_t, u32Value);
11741 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
11742 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
11743 IEM_MC_ADVANCE_RIP();
11744 IEM_MC_END();
11745 break;
11746
11747 case IEMMODE_64BIT:
11748 IEM_MC_BEGIN(0, 1);
11749 IEM_MC_LOCAL(uint64_t, u64Value);
11750 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
11751 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
11752 IEM_MC_ADVANCE_RIP();
11753 IEM_MC_END();
11754 break;
11755 }
11756 }
11757 else
11758 {
11759 /*
11760 * We're saving the register to memory. The access is word sized
11761 * regardless of operand size prefixes.
11762 */
11763#if 0 /* not necessary */
11764 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
11765#endif
11766 IEM_MC_BEGIN(0, 2);
11767 IEM_MC_LOCAL(uint16_t, u16Value);
11768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11769 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11771 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
11772 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
11773 IEM_MC_ADVANCE_RIP();
11774 IEM_MC_END();
11775 }
11776 return VINF_SUCCESS;
11777}
11778
11781
11782/** Opcode 0x8d. */
11783FNIEMOP_DEF(iemOp_lea_Gv_M)
11784{
11785 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
11786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11787 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11788 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
11789
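 /* LEA performs no memory access; the calculated effective address is simply stored into Gv, truncated to the operand size. */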
11790 switch (pVCpu->iem.s.enmEffOpSize)
11791 {
11792 case IEMMODE_16BIT:
11793 IEM_MC_BEGIN(0, 2);
11794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11795 IEM_MC_LOCAL(uint16_t, u16Cast);
11796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11798 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
11799 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
11800 IEM_MC_ADVANCE_RIP();
11801 IEM_MC_END();
11802 return VINF_SUCCESS;
11803
11804 case IEMMODE_32BIT:
11805 IEM_MC_BEGIN(0, 2);
11806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11807 IEM_MC_LOCAL(uint32_t, u32Cast);
11808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11810 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
11811 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
11812 IEM_MC_ADVANCE_RIP();
11813 IEM_MC_END();
11814 return VINF_SUCCESS;
11815
11816 case IEMMODE_64BIT:
11817 IEM_MC_BEGIN(0, 1);
11818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11819 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11821 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
11822 IEM_MC_ADVANCE_RIP();
11823 IEM_MC_END();
11824 return VINF_SUCCESS;
11825 }
11826 AssertFailedReturn(VERR_IEM_IPE_7);
11827}
11828
11829
11830/** Opcode 0x8e. */
11831FNIEMOP_DEF(iemOp_mov_Sw_Ev)
11832{
11833 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
11834
11835 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11836
11837 /*
11838 * The practical operand size is 16-bit.
11839 */
11840#if 0 /* not necessary */
11841 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
11842#endif
11843
11844 /*
11845 * Check that the destination register exists and can be used with this
11846 * instruction. The REX.R prefix is ignored.
11847 */
11848 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
11849 if ( iSegReg == X86_SREG_CS
11850 || iSegReg > X86_SREG_GS)
11851 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
11852
11853 /*
11854 * If rm is denoting a register, no more instruction bytes.
11855 */
11856 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11857 {
11858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11859 IEM_MC_BEGIN(2, 0);
11860 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
11861 IEM_MC_ARG(uint16_t, u16Value, 1);
11862 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11863 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
11864 IEM_MC_END();
11865 }
11866 else
11867 {
11868 /*
11869 * We're loading the register from memory. The access is word sized
11870 * regardless of operand size prefixes.
11871 */
11872 IEM_MC_BEGIN(2, 1);
11873 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
11874 IEM_MC_ARG(uint16_t, u16Value, 1);
11875 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11878 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11879 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
11880 IEM_MC_END();
11881 }
11882 return VINF_SUCCESS;
11883}
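
/*
 * Illustrative sketch (not built): the destination check above in plain C,
 * assuming the usual segment register numbering ES=0, CS=1, SS=2, DS=3,
 * FS=4, GS=5. CS cannot be the target of mov (far transfers load it), and
 * reg values above GS are undefined; both cases raise #UD.
 */
#if 0
# include <stdbool.h>
# include <stdint.h>
static bool sketchIsValidMovSregDst(uint8_t iSegReg)
{
    return iSegReg != 1 /*CS*/ && iSegReg <= 5 /*GS*/;
}
#endif
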
11884
11885
11886/** Opcode 0x8f /0. */
11887FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
11888{
11889 /* This bugger is rather annoying as it requires rSP to be updated before
11890 doing the effective address calculations. Will eventually require a
11891 split between the R/M+SIB decoding and the effective address
11892   calculation - something that any attempt at reusing this code in a
11893   recompiler will need anyway. It may also be good to have if we ever
11894   need to delay a #UD exception caused by an invalid lock prefix.
11895
11896 For now, we'll do a mostly safe interpreter-only implementation here. */
11897 /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
11898  *        now until tests show it's checked. */
11899 IEMOP_MNEMONIC(pop_Ev, "pop Ev");
11900
11901 /* Register access is relatively easy and can share code. */
11902 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11903 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11904
11905 /*
11906 * Memory target.
11907 *
11908 * Intel says that RSP is incremented before it's used in any effective
11909  * address calculations. This means some serious extra annoyance here since
11910 * we decode and calculate the effective address in one step and like to
11911 * delay committing registers till everything is done.
11912 *
11913 * So, we'll decode and calculate the effective address twice. This will
11914 * require some recoding if turned into a recompiler.
11915 */
11916 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
11917
11918#ifndef TST_IEM_CHECK_MC
11919 /* Calc effective address with modified ESP. */
11920/** @todo testcase */
11921 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
11922 RTGCPTR GCPtrEff;
11923 VBOXSTRICTRC rcStrict;
11924 switch (pVCpu->iem.s.enmEffOpSize)
11925 {
11926 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
11927 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
11928 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
11929 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11930 }
11931 if (rcStrict != VINF_SUCCESS)
11932 return rcStrict;
11933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11934
11935 /* Perform the operation - this should be CImpl. */
11936 RTUINT64U TmpRsp;
11937 TmpRsp.u = pCtx->rsp;
11938 switch (pVCpu->iem.s.enmEffOpSize)
11939 {
11940 case IEMMODE_16BIT:
11941 {
11942 uint16_t u16Value;
11943 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
11944 if (rcStrict == VINF_SUCCESS)
11945 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
11946 break;
11947 }
11948
11949 case IEMMODE_32BIT:
11950 {
11951 uint32_t u32Value;
11952 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
11953 if (rcStrict == VINF_SUCCESS)
11954 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
11955 break;
11956 }
11957
11958 case IEMMODE_64BIT:
11959 {
11960 uint64_t u64Value;
11961 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
11962 if (rcStrict == VINF_SUCCESS)
11963 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
11964 break;
11965 }
11966
11967 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11968 }
11969 if (rcStrict == VINF_SUCCESS)
11970 {
11971 pCtx->rsp = TmpRsp.u;
11972 iemRegUpdateRipAndClearRF(pVCpu);
11973 }
11974 return rcStrict;
11975
11976#else
11977 return VERR_IEM_IPE_2;
11978#endif
11979}
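
/*
 * Illustrative sketch (not built): the net semantics implemented above for
 * a pop to memory - the rSP value used by the effective address calculation
 * is the one *after* the pop, while the register itself is only committed
 * once the store has succeeded. Plain C, hypothetical flat-memory helpers,
 * 64-bit operand size:
 */
#if 0
# include <stdint.h>
# include <string.h>
static void sketchPopMemRspRel(uint64_t *pRsp, uint8_t *pbMem, int64_t offDisp)
{
    uint64_t uValue;
    memcpy(&uValue, &pbMem[*pRsp], sizeof(uValue));             /* read the old top of the stack */
    uint64_t const uNewRsp = *pRsp + 8;                         /* rSP after the pop ... */
    memcpy(&pbMem[uNewRsp + offDisp], &uValue, sizeof(uValue)); /* ... is what [rSP+disp] uses */
    *pRsp = uNewRsp;                                            /* committed only on success */
}
#endif
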
11980
11981
11982/** Opcode 0x8f. */
11983FNIEMOP_DEF(iemOp_Grp1A)
11984{
11985 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11986 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
11987 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
11988
11989 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
11990 /** @todo XOP decoding. */
11991 IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
11992 return IEMOP_RAISE_INVALID_OPCODE();
11993}
11994
11995
11996/**
11997 * Common 'xchg reg,rAX' helper.
11998 */
11999FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
12000{
12001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12002
12003 iReg |= pVCpu->iem.s.uRexB;
12004 switch (pVCpu->iem.s.enmEffOpSize)
12005 {
12006 case IEMMODE_16BIT:
12007 IEM_MC_BEGIN(0, 2);
12008 IEM_MC_LOCAL(uint16_t, u16Tmp1);
12009 IEM_MC_LOCAL(uint16_t, u16Tmp2);
12010 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
12011 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
12012 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
12013 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
12014 IEM_MC_ADVANCE_RIP();
12015 IEM_MC_END();
12016 return VINF_SUCCESS;
12017
12018 case IEMMODE_32BIT:
12019 IEM_MC_BEGIN(0, 2);
12020 IEM_MC_LOCAL(uint32_t, u32Tmp1);
12021 IEM_MC_LOCAL(uint32_t, u32Tmp2);
12022 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
12023 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
12024 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
12025 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
12026 IEM_MC_ADVANCE_RIP();
12027 IEM_MC_END();
12028 return VINF_SUCCESS;
12029
12030 case IEMMODE_64BIT:
12031 IEM_MC_BEGIN(0, 2);
12032 IEM_MC_LOCAL(uint64_t, u64Tmp1);
12033 IEM_MC_LOCAL(uint64_t, u64Tmp2);
12034 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
12035 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
12036 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
12037 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
12038 IEM_MC_ADVANCE_RIP();
12039 IEM_MC_END();
12040 return VINF_SUCCESS;
12041
12042 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12043 }
12044}
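
/*
 * Illustrative sketch (not built): the micro-ops above are a plain
 * two-temporary swap - both registers are read before either is written,
 * so the result stays correct even if the indices were to alias. Plain C
 * with a hypothetical register array:
 */
#if 0
# include <stdint.h>
static void sketchXchgRegRax(uint64_t *pauGRegs, unsigned iReg)
{
    uint64_t const uTmp1 = pauGRegs[iReg];
    uint64_t const uTmp2 = pauGRegs[0 /*rAX*/];
    pauGRegs[0 /*rAX*/] = uTmp1;
    pauGRegs[iReg]      = uTmp2;
}
#endif
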
12045
12046
12047/** Opcode 0x90. */
12048FNIEMOP_DEF(iemOp_nop)
12049{
12050 /* R8/R8D and RAX/EAX can be exchanged. */
12051 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
12052 {
12053 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
12054 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
12055 }
12056
12057     if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12058 IEMOP_MNEMONIC(pause, "pause");
12059 else
12060 IEMOP_MNEMONIC(nop, "nop");
12061 IEM_MC_BEGIN(0, 0);
12062 IEM_MC_ADVANCE_RIP();
12063 IEM_MC_END();
12064 return VINF_SUCCESS;
12065}
12066
12067
12068/** Opcode 0x91. */
12069FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
12070{
12071 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
12072 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
12073}
12074
12075
12076/** Opcode 0x92. */
12077FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
12078{
12079 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
12080 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
12081}
12082
12083
12084/** Opcode 0x93. */
12085FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
12086{
12087 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
12088 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
12089}
12090
12091
12092/** Opcode 0x94. */
12093FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
12094{
12095     IEMOP_MNEMONIC(xchg_rSP_rAX, "xchg rSP,rAX");
12096 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
12097}
12098
12099
12100/** Opcode 0x95. */
12101FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
12102{
12103 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
12104 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
12105}
12106
12107
12108/** Opcode 0x96. */
12109FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
12110{
12111 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
12112 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
12113}
12114
12115
12116/** Opcode 0x97. */
12117FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
12118{
12119 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
12120 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
12121}
12122
12123
12124/** Opcode 0x98. */
12125FNIEMOP_DEF(iemOp_cbw)
12126{
12127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12128 switch (pVCpu->iem.s.enmEffOpSize)
12129 {
12130 case IEMMODE_16BIT:
12131 IEMOP_MNEMONIC(cbw, "cbw");
12132 IEM_MC_BEGIN(0, 1);
12133 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
12134 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
12135 } IEM_MC_ELSE() {
12136 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
12137 } IEM_MC_ENDIF();
12138 IEM_MC_ADVANCE_RIP();
12139 IEM_MC_END();
12140 return VINF_SUCCESS;
12141
12142 case IEMMODE_32BIT:
12143 IEMOP_MNEMONIC(cwde, "cwde");
12144 IEM_MC_BEGIN(0, 1);
12145 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
12146 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
12147 } IEM_MC_ELSE() {
12148 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
12149 } IEM_MC_ENDIF();
12150 IEM_MC_ADVANCE_RIP();
12151 IEM_MC_END();
12152 return VINF_SUCCESS;
12153
12154 case IEMMODE_64BIT:
12155 IEMOP_MNEMONIC(cdqe, "cdqe");
12156 IEM_MC_BEGIN(0, 1);
12157 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
12158 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
12159 } IEM_MC_ELSE() {
12160 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
12161 } IEM_MC_ENDIF();
12162 IEM_MC_ADVANCE_RIP();
12163 IEM_MC_END();
12164 return VINF_SUCCESS;
12165
12166 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12167 }
12168}
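
/*
 * Illustrative sketch (not built): the OR/AND-with-mask pattern above is
 * just sign extension within rAX. The 16-bit case (cbw) is equivalent to
 * the following, assuming the usual two's complement conversions:
 */
#if 0
# include <stdint.h>
static uint16_t sketchCbw(uint16_t uAx)
{
    return (uint16_t)(int16_t)(int8_t)(uint8_t)uAx; /* AL sign-extended into AX */
}
#endif
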
12169
12170
12171/** Opcode 0x99. */
12172FNIEMOP_DEF(iemOp_cwd)
12173{
12174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12175 switch (pVCpu->iem.s.enmEffOpSize)
12176 {
12177 case IEMMODE_16BIT:
12178 IEMOP_MNEMONIC(cwd, "cwd");
12179 IEM_MC_BEGIN(0, 1);
12180 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
12181 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
12182 } IEM_MC_ELSE() {
12183 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
12184 } IEM_MC_ENDIF();
12185 IEM_MC_ADVANCE_RIP();
12186 IEM_MC_END();
12187 return VINF_SUCCESS;
12188
12189 case IEMMODE_32BIT:
12190 IEMOP_MNEMONIC(cdq, "cdq");
12191 IEM_MC_BEGIN(0, 1);
12192 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
12193 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
12194 } IEM_MC_ELSE() {
12195 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
12196 } IEM_MC_ENDIF();
12197 IEM_MC_ADVANCE_RIP();
12198 IEM_MC_END();
12199 return VINF_SUCCESS;
12200
12201 case IEMMODE_64BIT:
12202 IEMOP_MNEMONIC(cqo, "cqo");
12203 IEM_MC_BEGIN(0, 1);
12204 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
12205 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
12206 } IEM_MC_ELSE() {
12207 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
12208 } IEM_MC_ENDIF();
12209 IEM_MC_ADVANCE_RIP();
12210 IEM_MC_END();
12211 return VINF_SUCCESS;
12212
12213 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12214 }
12215}
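
/*
 * Illustrative sketch (not built): cwd/cdq/cqo fill rDX with copies of the
 * accumulator's sign bit, which the conditional stores above do without
 * touching rAX. A standalone cdq equivalent, assuming arithmetic right
 * shift for signed values:
 */
#if 0
# include <stdint.h>
static uint32_t sketchCdq(uint32_t uEax)
{
    return (uint32_t)((int32_t)uEax >> 31); /* 0 or 0xffffffff for EDX */
}
#endif
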
12216
12217
12218/** Opcode 0x9a. */
12219FNIEMOP_DEF(iemOp_call_Ap)
12220{
12221 IEMOP_MNEMONIC(call_Ap, "call Ap");
12222 IEMOP_HLP_NO_64BIT();
12223
12224 /* Decode the far pointer address and pass it on to the far call C implementation. */
12225 uint32_t offSeg;
12226 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
12227 IEM_OPCODE_GET_NEXT_U32(&offSeg);
12228 else
12229 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
12230 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
12231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12232 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
12233}
12234
12235
12236/** Opcode 0x9b. (aka fwait) */
12237FNIEMOP_DEF(iemOp_wait)
12238{
12239 IEMOP_MNEMONIC(wait, "wait");
12240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12241
12242 IEM_MC_BEGIN(0, 0);
12243 IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
12244 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12245 IEM_MC_ADVANCE_RIP();
12246 IEM_MC_END();
12247 return VINF_SUCCESS;
12248}
12249
12250
12251/** Opcode 0x9c. */
12252FNIEMOP_DEF(iemOp_pushf_Fv)
12253{
12254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12255 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12256 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
12257}
12258
12259
12260/** Opcode 0x9d. */
12261FNIEMOP_DEF(iemOp_popf_Fv)
12262{
12263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12264 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12265 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
12266}
12267
12268
12269/** Opcode 0x9e. */
12270FNIEMOP_DEF(iemOp_sahf)
12271{
12272 IEMOP_MNEMONIC(sahf, "sahf");
12273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12274 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
12275 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
12276 return IEMOP_RAISE_INVALID_OPCODE();
12277 IEM_MC_BEGIN(0, 2);
12278 IEM_MC_LOCAL(uint32_t, u32Flags);
12279 IEM_MC_LOCAL(uint32_t, EFlags);
12280 IEM_MC_FETCH_EFLAGS(EFlags);
12281 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
12282 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
12283 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
12284 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
12285 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
12286 IEM_MC_COMMIT_EFLAGS(EFlags);
12287 IEM_MC_ADVANCE_RIP();
12288 IEM_MC_END();
12289 return VINF_SUCCESS;
12290}
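
/*
 * Illustrative sketch (not built): the sahf flag merge above in standalone
 * C - AH supplies SF/ZF/AF/PF/CF, the reserved bit 1 always reads as one,
 * and the remaining EFLAGS bits are preserved (mask values per the x86
 * EFLAGS layout):
 */
#if 0
# include <stdint.h>
static uint32_t sketchSahf(uint32_t fEFlags, uint8_t bAh)
{
    uint32_t const fMask = 0x80 /*SF*/ | 0x40 /*ZF*/ | 0x10 /*AF*/ | 0x04 /*PF*/ | 0x01 /*CF*/;
    return (fEFlags & UINT32_C(0xffffff00)) | (bAh & fMask) | 0x02 /*reserved bit 1*/;
}
#endif
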
12291
12292
12293/** Opcode 0x9f. */
12294FNIEMOP_DEF(iemOp_lahf)
12295{
12296 IEMOP_MNEMONIC(lahf, "lahf");
12297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12298 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
12299 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
12300 return IEMOP_RAISE_INVALID_OPCODE();
12301 IEM_MC_BEGIN(0, 1);
12302 IEM_MC_LOCAL(uint8_t, u8Flags);
12303 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
12304 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
12305 IEM_MC_ADVANCE_RIP();
12306 IEM_MC_END();
12307 return VINF_SUCCESS;
12308}
12309
12310
12311/**
12312 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
12313  * iemOp_mov_Ov_rAX to fetch the moffsXX part of the opcode and fend off lock
12314 * prefixes. Will return on failures.
12315 * @param a_GCPtrMemOff The variable to store the offset in.
12316 */
12317#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
12318 do \
12319 { \
12320 switch (pVCpu->iem.s.enmEffAddrMode) \
12321 { \
12322 case IEMMODE_16BIT: \
12323 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
12324 break; \
12325 case IEMMODE_32BIT: \
12326 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
12327 break; \
12328 case IEMMODE_64BIT: \
12329 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
12330 break; \
12331 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
12332 } \
12333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12334 } while (0)
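
/*
 * Illustrative sketch (not built): the width of the moffs immediate
 * consumed by the macro above follows the effective *address* size, not
 * the operand size. A standalone fetch, assuming a little-endian host and
 * hypothetical names:
 */
#if 0
# include <stdint.h>
# include <string.h>
static uint64_t sketchFetchMoffs(const uint8_t *pbOpcode, unsigned cAddrBits)
{
    uint64_t uOff = 0;
    memcpy(&uOff, pbOpcode, cAddrBits / 8); /* 2, 4 or 8 bytes, zero extended */
    return uOff;
}
#endif
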
12335
12336/** Opcode 0xa0. */
12337FNIEMOP_DEF(iemOp_mov_Al_Ob)
12338{
12339 /*
12340     * Get the offset and fend off lock prefixes.
12341 */
12342 RTGCPTR GCPtrMemOff;
12343 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12344
12345 /*
12346 * Fetch AL.
12347 */
12348 IEM_MC_BEGIN(0,1);
12349 IEM_MC_LOCAL(uint8_t, u8Tmp);
12350 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12351 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
12352 IEM_MC_ADVANCE_RIP();
12353 IEM_MC_END();
12354 return VINF_SUCCESS;
12355}
12356
12357
12358/** Opcode 0xa1. */
12359FNIEMOP_DEF(iemOp_mov_rAX_Ov)
12360{
12361 /*
12362     * Get the offset and fend off lock prefixes.
12363 */
12364 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
12365 RTGCPTR GCPtrMemOff;
12366 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12367
12368 /*
12369 * Fetch rAX.
12370 */
12371 switch (pVCpu->iem.s.enmEffOpSize)
12372 {
12373 case IEMMODE_16BIT:
12374 IEM_MC_BEGIN(0,1);
12375 IEM_MC_LOCAL(uint16_t, u16Tmp);
12376 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12377 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
12378 IEM_MC_ADVANCE_RIP();
12379 IEM_MC_END();
12380 return VINF_SUCCESS;
12381
12382 case IEMMODE_32BIT:
12383 IEM_MC_BEGIN(0,1);
12384 IEM_MC_LOCAL(uint32_t, u32Tmp);
12385 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12386 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
12387 IEM_MC_ADVANCE_RIP();
12388 IEM_MC_END();
12389 return VINF_SUCCESS;
12390
12391 case IEMMODE_64BIT:
12392 IEM_MC_BEGIN(0,1);
12393 IEM_MC_LOCAL(uint64_t, u64Tmp);
12394 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12395 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
12396 IEM_MC_ADVANCE_RIP();
12397 IEM_MC_END();
12398 return VINF_SUCCESS;
12399
12400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12401 }
12402}
12403
12404
12405/** Opcode 0xa2. */
12406FNIEMOP_DEF(iemOp_mov_Ob_AL)
12407{
12408 /*
12409     * Get the offset and fend off lock prefixes.
12410 */
12411 RTGCPTR GCPtrMemOff;
12412 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12413
12414 /*
12415 * Store AL.
12416 */
12417 IEM_MC_BEGIN(0,1);
12418 IEM_MC_LOCAL(uint8_t, u8Tmp);
12419 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
12420 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
12421 IEM_MC_ADVANCE_RIP();
12422 IEM_MC_END();
12423 return VINF_SUCCESS;
12424}
12425
12426
12427/** Opcode 0xa3. */
12428FNIEMOP_DEF(iemOp_mov_Ov_rAX)
12429{
12430 /*
12431     * Get the offset and fend off lock prefixes.
12432 */
12433 RTGCPTR GCPtrMemOff;
12434 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12435
12436 /*
12437 * Store rAX.
12438 */
12439 switch (pVCpu->iem.s.enmEffOpSize)
12440 {
12441 case IEMMODE_16BIT:
12442 IEM_MC_BEGIN(0,1);
12443 IEM_MC_LOCAL(uint16_t, u16Tmp);
12444 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
12445 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
12446 IEM_MC_ADVANCE_RIP();
12447 IEM_MC_END();
12448 return VINF_SUCCESS;
12449
12450 case IEMMODE_32BIT:
12451 IEM_MC_BEGIN(0,1);
12452 IEM_MC_LOCAL(uint32_t, u32Tmp);
12453 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
12454 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
12455 IEM_MC_ADVANCE_RIP();
12456 IEM_MC_END();
12457 return VINF_SUCCESS;
12458
12459 case IEMMODE_64BIT:
12460 IEM_MC_BEGIN(0,1);
12461 IEM_MC_LOCAL(uint64_t, u64Tmp);
12462 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
12463 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
12464 IEM_MC_ADVANCE_RIP();
12465 IEM_MC_END();
12466 return VINF_SUCCESS;
12467
12468 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12469 }
12470}
12471
12472/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
12473#define IEM_MOVS_CASE(ValBits, AddrBits) \
12474 IEM_MC_BEGIN(0, 2); \
12475 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12476 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12477 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12478 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
12479 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12480 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
12481 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12482 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12483 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12484 } IEM_MC_ELSE() { \
12485 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12486 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12487 } IEM_MC_ENDIF(); \
12488 IEM_MC_ADVANCE_RIP(); \
12489 IEM_MC_END();
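
/*
 * Illustrative sketch (not built): one non-repeated movs step as generated
 * by the macro above - EFLAGS.DF picks the direction and both rSI and rDI
 * move by the element size. Plain C, hypothetical names, flat memory, byte
 * element, 64-bit addressing (the 16/32-bit forms wrap within their
 * address width):
 */
#if 0
# include <stdbool.h>
# include <stdint.h>
static void sketchMovsb(uint8_t *pbMem, uint64_t *puSi, uint64_t *puDi, bool fDf)
{
    pbMem[*puDi] = pbMem[*puSi];                    /* DS:[rSI] -> ES:[rDI] */
    uint64_t const uStep = fDf ? (uint64_t)-1 : 1;  /* -1 or +1 for a byte element */
    *puSi += uStep;
    *puDi += uStep;
}
#endif
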
12490
12491/** Opcode 0xa4. */
12492FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
12493{
12494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12495
12496 /*
12497 * Use the C implementation if a repeat prefix is encountered.
12498 */
12499 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12500 {
12501 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
12502 switch (pVCpu->iem.s.enmEffAddrMode)
12503 {
12504 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
12505 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
12506 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
12507 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12508 }
12509 }
12510 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
12511
12512 /*
12513 * Sharing case implementation with movs[wdq] below.
12514 */
12515 switch (pVCpu->iem.s.enmEffAddrMode)
12516 {
12517 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
12518 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
12519 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
12520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12521 }
12522 return VINF_SUCCESS;
12523}
12524
12525
12526/** Opcode 0xa5. */
12527FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
12528{
12529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12530
12531 /*
12532 * Use the C implementation if a repeat prefix is encountered.
12533 */
12534 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12535 {
12536 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
12537 switch (pVCpu->iem.s.enmEffOpSize)
12538 {
12539 case IEMMODE_16BIT:
12540 switch (pVCpu->iem.s.enmEffAddrMode)
12541 {
12542 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
12543 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
12544 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
12545 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12546 }
12547 break;
12548 case IEMMODE_32BIT:
12549 switch (pVCpu->iem.s.enmEffAddrMode)
12550 {
12551 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
12552 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
12553 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
12554 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12555 }
12556 case IEMMODE_64BIT:
12557 switch (pVCpu->iem.s.enmEffAddrMode)
12558 {
12559 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
12560 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
12561 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
12562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12563 }
12564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12565 }
12566 }
12567 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
12568
12569 /*
12570 * Annoying double switch here.
12571 * Using ugly macro for implementing the cases, sharing it with movsb.
12572 */
12573 switch (pVCpu->iem.s.enmEffOpSize)
12574 {
12575 case IEMMODE_16BIT:
12576 switch (pVCpu->iem.s.enmEffAddrMode)
12577 {
12578 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
12579 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
12580 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
12581 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12582 }
12583 break;
12584
12585 case IEMMODE_32BIT:
12586 switch (pVCpu->iem.s.enmEffAddrMode)
12587 {
12588 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
12589 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
12590 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
12591 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12592 }
12593 break;
12594
12595 case IEMMODE_64BIT:
12596 switch (pVCpu->iem.s.enmEffAddrMode)
12597 {
12598 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12599 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
12600 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
12601 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12602 }
12603 break;
12604 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12605 }
12606 return VINF_SUCCESS;
12607}
12608
12609#undef IEM_MOVS_CASE
12610
12611/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
12612#define IEM_CMPS_CASE(ValBits, AddrBits) \
12613 IEM_MC_BEGIN(3, 3); \
12614 IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
12615 IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
12616 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
12617 IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
12618 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12619 \
12620 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12621 IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
12622 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12623 IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
12624 IEM_MC_REF_LOCAL(puValue1, uValue1); \
12625 IEM_MC_REF_EFLAGS(pEFlags); \
12626 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
12627 \
12628 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12629 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12630 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12631 } IEM_MC_ELSE() { \
12632 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12633 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12634 } IEM_MC_ENDIF(); \
12635 IEM_MC_ADVANCE_RIP(); \
12636 IEM_MC_END(); \
12637
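
/*
 * Illustrative sketch (not built): the macro above is a single cmps step;
 * with a repe prefix the C fallbacks loop while rCX is non-zero and the
 * comparands are still equal. A standalone repe-cmpsb model (hypothetical
 * names, flat memory, DF clear; a real rCX = 0 iteration leaves the flags
 * untouched):
 */
#if 0
# include <stdbool.h>
# include <stdint.h>
static bool sketchRepeCmpsb(const uint8_t *pbMem, uint64_t *puSi, uint64_t *puDi, uint64_t *puCx)
{
    bool fZf = true; /* placeholder; flags are unmodified when rCX starts at zero */
    while (*puCx != 0)
    {
        fZf = pbMem[*puSi] == pbMem[*puDi]; /* what cmp would leave in ZF */
        *puSi += 1;
        *puDi += 1;
        *puCx -= 1;
        if (!fZf)
            break; /* repe: stop on the first mismatch */
    }
    return fZf;
}
#endif
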
12638/** Opcode 0xa6. */
12639FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
12640{
12641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12642
12643 /*
12644 * Use the C implementation if a repeat prefix is encountered.
12645 */
12646 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12647 {
12648 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
12649 switch (pVCpu->iem.s.enmEffAddrMode)
12650 {
12651 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
12652 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
12653 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
12654 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12655 }
12656 }
12657 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12658 {
12659 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
12660 switch (pVCpu->iem.s.enmEffAddrMode)
12661 {
12662 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
12663 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
12664 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
12665 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12666 }
12667 }
12668 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
12669
12670 /*
12671 * Sharing case implementation with cmps[wdq] below.
12672 */
12673 switch (pVCpu->iem.s.enmEffAddrMode)
12674 {
12675 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
12676 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
12677 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
12678 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12679 }
12680 return VINF_SUCCESS;
12681
12682}
12683
12684
12685/** Opcode 0xa7. */
12686FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
12687{
12688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12689
12690 /*
12691 * Use the C implementation if a repeat prefix is encountered.
12692 */
12693 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12694 {
12695 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
12696 switch (pVCpu->iem.s.enmEffOpSize)
12697 {
12698 case IEMMODE_16BIT:
12699 switch (pVCpu->iem.s.enmEffAddrMode)
12700 {
12701 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12702 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12703 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12704 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12705 }
12706 break;
12707 case IEMMODE_32BIT:
12708 switch (pVCpu->iem.s.enmEffAddrMode)
12709 {
12710 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12711 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12712 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12713 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12714 }
12715 case IEMMODE_64BIT:
12716 switch (pVCpu->iem.s.enmEffAddrMode)
12717 {
12718 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
12719 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12720 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12721 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12722 }
12723 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12724 }
12725 }
12726
12727 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12728 {
12729 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
12730 switch (pVCpu->iem.s.enmEffOpSize)
12731 {
12732 case IEMMODE_16BIT:
12733 switch (pVCpu->iem.s.enmEffAddrMode)
12734 {
12735 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
12736 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
12737 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
12738 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12739 }
12740 break;
12741 case IEMMODE_32BIT:
12742 switch (pVCpu->iem.s.enmEffAddrMode)
12743 {
12744 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
12745 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
12746 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
12747 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12748 }
12749 case IEMMODE_64BIT:
12750 switch (pVCpu->iem.s.enmEffAddrMode)
12751 {
12752 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
12753 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
12754 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
12755 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12756 }
12757 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12758 }
12759 }
12760
12761 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
12762
12763 /*
12764 * Annoying double switch here.
12765 * Using ugly macro for implementing the cases, sharing it with cmpsb.
12766 */
12767 switch (pVCpu->iem.s.enmEffOpSize)
12768 {
12769 case IEMMODE_16BIT:
12770 switch (pVCpu->iem.s.enmEffAddrMode)
12771 {
12772 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
12773 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
12774 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
12775 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12776 }
12777 break;
12778
12779 case IEMMODE_32BIT:
12780 switch (pVCpu->iem.s.enmEffAddrMode)
12781 {
12782 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
12783 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
12784 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
12785 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12786 }
12787 break;
12788
12789 case IEMMODE_64BIT:
12790 switch (pVCpu->iem.s.enmEffAddrMode)
12791 {
12792 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12793 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
12794 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
12795 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12796 }
12797 break;
12798 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12799 }
12800 return VINF_SUCCESS;
12801
12802}
12803
12804#undef IEM_CMPS_CASE
12805
12806/** Opcode 0xa8. */
12807FNIEMOP_DEF(iemOp_test_AL_Ib)
12808{
12809 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
12810 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12811 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
12812}
12813
12814
12815/** Opcode 0xa9. */
12816FNIEMOP_DEF(iemOp_test_eAX_Iz)
12817{
12818 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
12819 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12820 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
12821}
12822
12823
12824/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
12825#define IEM_STOS_CASE(ValBits, AddrBits) \
12826 IEM_MC_BEGIN(0, 2); \
12827 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12828 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12829 IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
12830 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12831 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
12832 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12833 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12834 } IEM_MC_ELSE() { \
12835 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12836 } IEM_MC_ENDIF(); \
12837 IEM_MC_ADVANCE_RIP(); \
12838 IEM_MC_END(); \
12839
12840/** Opcode 0xaa. */
12841FNIEMOP_DEF(iemOp_stosb_Yb_AL)
12842{
12843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12844
12845 /*
12846 * Use the C implementation if a repeat prefix is encountered.
12847 */
12848 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12849 {
12850 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
12851 switch (pVCpu->iem.s.enmEffAddrMode)
12852 {
12853 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
12854 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
12855 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
12856 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12857 }
12858 }
12859 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
12860
12861 /*
12862 * Sharing case implementation with stos[wdq] below.
12863 */
12864 switch (pVCpu->iem.s.enmEffAddrMode)
12865 {
12866 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
12867 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
12868 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
12869 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12870 }
12871 return VINF_SUCCESS;
12872}
12873
12874
12875/** Opcode 0xab. */
12876FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
12877{
12878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12879
12880 /*
12881 * Use the C implementation if a repeat prefix is encountered.
12882 */
12883 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12884 {
12885 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
12886 switch (pVCpu->iem.s.enmEffOpSize)
12887 {
12888 case IEMMODE_16BIT:
12889 switch (pVCpu->iem.s.enmEffAddrMode)
12890 {
12891 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
12892 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
12893 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
12894 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12895 }
12896 break;
12897 case IEMMODE_32BIT:
12898 switch (pVCpu->iem.s.enmEffAddrMode)
12899 {
12900 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
12901 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
12902 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
12903 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12904 }
12905 case IEMMODE_64BIT:
12906 switch (pVCpu->iem.s.enmEffAddrMode)
12907 {
12908 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
12909 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
12910 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
12911 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12912 }
12913 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12914 }
12915 }
12916 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
12917
12918 /*
12919 * Annoying double switch here.
12920 * Using ugly macro for implementing the cases, sharing it with stosb.
12921 */
12922 switch (pVCpu->iem.s.enmEffOpSize)
12923 {
12924 case IEMMODE_16BIT:
12925 switch (pVCpu->iem.s.enmEffAddrMode)
12926 {
12927 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
12928 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
12929 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
12930 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12931 }
12932 break;
12933
12934 case IEMMODE_32BIT:
12935 switch (pVCpu->iem.s.enmEffAddrMode)
12936 {
12937 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
12938 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
12939 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
12940 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12941 }
12942 break;
12943
12944 case IEMMODE_64BIT:
12945 switch (pVCpu->iem.s.enmEffAddrMode)
12946 {
12947 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12948 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
12949 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
12950 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12951 }
12952 break;
12953 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12954 }
12955 return VINF_SUCCESS;
12956}
12957
12958#undef IEM_STOS_CASE
12959
12960/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
12961#define IEM_LODS_CASE(ValBits, AddrBits) \
12962 IEM_MC_BEGIN(0, 2); \
12963 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12964 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12965 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12966 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
12967 IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
12968 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12969 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12970 } IEM_MC_ELSE() { \
12971 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12972 } IEM_MC_ENDIF(); \
12973 IEM_MC_ADVANCE_RIP(); \
12974 IEM_MC_END();
12975
12976/** Opcode 0xac. */
12977FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
12978{
12979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12980
12981 /*
12982 * Use the C implementation if a repeat prefix is encountered.
12983 */
12984 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12985 {
12986 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
12987 switch (pVCpu->iem.s.enmEffAddrMode)
12988 {
12989 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
12990 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
12991 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
12992 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12993 }
12994 }
12995 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
12996
12997 /*
12998     * Sharing case implementation with lods[wdq] below.
12999 */
13000 switch (pVCpu->iem.s.enmEffAddrMode)
13001 {
13002 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
13003 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
13004 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
13005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13006 }
13007 return VINF_SUCCESS;
13008}
13009
13010
13011/** Opcode 0xad. */
13012FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
13013{
13014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13015
13016 /*
13017 * Use the C implementation if a repeat prefix is encountered.
13018 */
13019 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
13020 {
13021 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
13022 switch (pVCpu->iem.s.enmEffOpSize)
13023 {
13024 case IEMMODE_16BIT:
13025 switch (pVCpu->iem.s.enmEffAddrMode)
13026 {
13027 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
13028 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
13029 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
13030 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13031 }
13032 break;
13033 case IEMMODE_32BIT:
13034 switch (pVCpu->iem.s.enmEffAddrMode)
13035 {
13036 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
13037 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
13038 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
13039 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13040 }
13041 case IEMMODE_64BIT:
13042 switch (pVCpu->iem.s.enmEffAddrMode)
13043 {
13044 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
13045 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
13046 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
13047 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13048 }
13049 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13050 }
13051 }
13052 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
13053
13054 /*
13055 * Annoying double switch here.
13056 * Using ugly macro for implementing the cases, sharing it with lodsb.
13057 */
13058 switch (pVCpu->iem.s.enmEffOpSize)
13059 {
13060 case IEMMODE_16BIT:
13061 switch (pVCpu->iem.s.enmEffAddrMode)
13062 {
13063 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
13064 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
13065 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
13066 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13067 }
13068 break;
13069
13070 case IEMMODE_32BIT:
13071 switch (pVCpu->iem.s.enmEffAddrMode)
13072 {
13073 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
13074 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
13075 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
13076 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13077 }
13078 break;
13079
13080 case IEMMODE_64BIT:
13081 switch (pVCpu->iem.s.enmEffAddrMode)
13082 {
13083 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
13084 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
13085 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
13086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13087 }
13088 break;
13089 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13090 }
13091 return VINF_SUCCESS;
13092}
13093
13094#undef IEM_LODS_CASE
13095
13096/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
13097#define IEM_SCAS_CASE(ValBits, AddrBits) \
13098 IEM_MC_BEGIN(3, 2); \
13099 IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
13100 IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
13101 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
13102 IEM_MC_LOCAL(RTGCPTR, uAddr); \
13103 \
13104 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
13105 IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
13106 IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
13107 IEM_MC_REF_EFLAGS(pEFlags); \
13108 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
13109 \
13110 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
13111 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
13112 } IEM_MC_ELSE() { \
13113 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
13114 } IEM_MC_ENDIF(); \
13115 IEM_MC_ADVANCE_RIP(); \
13116 IEM_MC_END();
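
/*
 * Illustrative sketch (not built): scas always reads from ES:[rDI] (the
 * destination side takes no segment override) and compares against the
 * accumulator. A standalone model of 'repne scasb' - the classic
 * strlen-style scan - with hypothetical names, flat memory and DF clear:
 */
#if 0
# include <stdint.h>
static uint64_t sketchRepneScasb(const uint8_t *pbMem, uint8_t bAl, uint64_t uDi, uint64_t *puCx)
{
    while (*puCx != 0)
    {
        uint8_t const bVal = pbMem[uDi++];
        *puCx -= 1;
        if (bVal == bAl) /* the cmp sets ZF, terminating repne */
            break;
    }
    return uDi;          /* rDI ends one element past the last byte examined */
}
#endif
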
13117
13118/** Opcode 0xae. */
13119FNIEMOP_DEF(iemOp_scasb_AL_Xb)
13120{
13121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13122
13123 /*
13124 * Use the C implementation if a repeat prefix is encountered.
13125 */
13126 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
13127 {
13128 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
13129 switch (pVCpu->iem.s.enmEffAddrMode)
13130 {
13131 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
13132 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
13133 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
13134 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13135 }
13136 }
13137 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
13138 {
13139         IEMOP_MNEMONIC(repne_scasb_AL_Xb, "repne scasb AL,Xb");
13140 switch (pVCpu->iem.s.enmEffAddrMode)
13141 {
13142 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
13143 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
13144 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
13145 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13146 }
13147 }
13148 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
13149
13150 /*
13151     * Sharing case implementation with scas[wdq] below.
13152 */
13153 switch (pVCpu->iem.s.enmEffAddrMode)
13154 {
13155 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
13156 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
13157 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
13158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13159 }
13160 return VINF_SUCCESS;
13161}
13162
13163
13164/** Opcode 0xaf. */
13165FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
13166{
13167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13168
13169 /*
13170 * Use the C implementation if a repeat prefix is encountered.
13171 */
13172 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
13173 {
13174 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
13175 switch (pVCpu->iem.s.enmEffOpSize)
13176 {
13177 case IEMMODE_16BIT:
13178 switch (pVCpu->iem.s.enmEffAddrMode)
13179 {
13180 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
13181 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
13182 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
13183 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13184 }
13185 break;
13186 case IEMMODE_32BIT:
13187 switch (pVCpu->iem.s.enmEffAddrMode)
13188 {
13189 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
13190 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
13191 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
13192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13193 }
13194 case IEMMODE_64BIT:
13195 switch (pVCpu->iem.s.enmEffAddrMode)
13196 {
13197                 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Isn't this wrong? We can do 32-bit addressing in 64-bit mode, but not 16-bit, right? */
13198 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
13199 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
13200 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13201 }
13202 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13203 }
13204 }
13205 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
13206 {
13207 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
13208 switch (pVCpu->iem.s.enmEffOpSize)
13209 {
13210 case IEMMODE_16BIT:
13211 switch (pVCpu->iem.s.enmEffAddrMode)
13212 {
13213 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
13214 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
13215 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
13216 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13217 }
13218 break;
13219 case IEMMODE_32BIT:
13220 switch (pVCpu->iem.s.enmEffAddrMode)
13221 {
13222 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
13223 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
13224 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
13225 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13226 }
13227 case IEMMODE_64BIT:
13228 switch (pVCpu->iem.s.enmEffAddrMode)
13229 {
13230 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
13231 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
13232 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
13233 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13234 }
13235 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13236 }
13237 }
13238 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
13239
13240 /*
13241 * Annoying double switch here.
13242 * Using ugly macro for implementing the cases, sharing it with scasb.
13243 */
13244 switch (pVCpu->iem.s.enmEffOpSize)
13245 {
13246 case IEMMODE_16BIT:
13247 switch (pVCpu->iem.s.enmEffAddrMode)
13248 {
13249 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
13250 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
13251 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
13252 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13253 }
13254 break;
13255
13256 case IEMMODE_32BIT:
13257 switch (pVCpu->iem.s.enmEffAddrMode)
13258 {
13259 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
13260 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
13261 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
13262 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13263 }
13264 break;
13265
13266 case IEMMODE_64BIT:
13267 switch (pVCpu->iem.s.enmEffAddrMode)
13268 {
13269 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
13270 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
13271 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
13272 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13273 }
13274 break;
13275 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13276 }
13277 return VINF_SUCCESS;
13278}
13279
13280#undef IEM_SCAS_CASE
13281
13282/**
13283 * Common 'mov r8, imm8' helper.
13284 */
13285FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
13286{
13287 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13289
13290 IEM_MC_BEGIN(0, 1);
13291 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
13292 IEM_MC_STORE_GREG_U8(iReg, u8Value);
13293 IEM_MC_ADVANCE_RIP();
13294 IEM_MC_END();
13295
13296 return VINF_SUCCESS;
13297}
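
/*
 * Illustrative sketch (not built): how the 0xb0..0xb7 handlers below
 * arrive at the register index - the low three opcode bits extended by
 * REX.B. With any REX prefix present, indexes 4..7 address SPL/BPL/SIL/DIL
 * (or R12B..R15B with REX.B) rather than AH..BH; that distinction is
 * handled by the 8-bit register fetch/store helpers elsewhere in IEM.
 * Plain C:
 */
#if 0
# include <stdbool.h>
# include <stdint.h>
static uint8_t sketchMovR8IbRegIndex(uint8_t bOpcode, bool fRexB)
{
    return (uint8_t)((bOpcode & 7) | (fRexB ? 8 : 0)); /* e.g. 0xb4 + REX.B -> 12 (R12B) */
}
#endif
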
13298
13299
13300/** Opcode 0xb0. */
13301FNIEMOP_DEF(iemOp_mov_AL_Ib)
13302{
13303 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
13304 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
13305}
13306
13307
13308/** Opcode 0xb1. */
13309FNIEMOP_DEF(iemOp_CL_Ib)
13310{
13311 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
13312 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13313}
13314
13315
13316/** Opcode 0xb2. */
13317FNIEMOP_DEF(iemOp_DL_Ib)
13318{
13319 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
13320 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13321}
13322
13323
13324/** Opcode 0xb3. */
13325FNIEMOP_DEF(iemOp_BL_Ib)
13326{
13327 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
13328 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13329}
13330
13331
13332/** Opcode 0xb4. */
13333FNIEMOP_DEF(iemOp_mov_AH_Ib)
13334{
13335 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
13336 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13337}
13338
13339
13340/** Opcode 0xb5. */
13341FNIEMOP_DEF(iemOp_CH_Ib)
13342{
13343 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
13344 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13345}
13346
13347
13348/** Opcode 0xb6. */
13349FNIEMOP_DEF(iemOp_DH_Ib)
13350{
13351 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
13352 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13353}
13354
13355
13356/** Opcode 0xb7. */
13357FNIEMOP_DEF(iemOp_BH_Ib)
13358{
13359 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
13360 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13361}
13362
13363
13364/**
13365 * Common 'mov regX,immX' helper.
13366 */
13367FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
13368{
13369 switch (pVCpu->iem.s.enmEffOpSize)
13370 {
13371 case IEMMODE_16BIT:
13372 {
13373 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13375
13376 IEM_MC_BEGIN(0, 1);
13377 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
13378 IEM_MC_STORE_GREG_U16(iReg, u16Value);
13379 IEM_MC_ADVANCE_RIP();
13380 IEM_MC_END();
13381 break;
13382 }
13383
13384 case IEMMODE_32BIT:
13385 {
13386 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13388
13389 IEM_MC_BEGIN(0, 1);
13390 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
13391 IEM_MC_STORE_GREG_U32(iReg, u32Value);
13392 IEM_MC_ADVANCE_RIP();
13393 IEM_MC_END();
13394 break;
13395 }
13396 case IEMMODE_64BIT:
13397 {
13398 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
13399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13400
13401 IEM_MC_BEGIN(0, 1);
13402 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
13403 IEM_MC_STORE_GREG_U64(iReg, u64Value);
13404 IEM_MC_ADVANCE_RIP();
13405 IEM_MC_END();
13406 break;
13407 }
13408 }
13409
13410 return VINF_SUCCESS;
13411}
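
/*
 * Illustrative sketch (not built): the immediate consumed above is 2, 4 or
 * 8 bytes depending on the effective operand size; B8+r with REX.W is the
 * only x86 instruction form that carries a full 64-bit immediate, and the
 * 32-bit form zero-extends into the 64-bit register as usual.
 */
#if 0
static unsigned sketchMovRvIvImmBytes(unsigned cOpBits)
{
    return cOpBits / 8; /* 16 -> 2, 32 -> 4, 64 -> 8 */
}
#endif
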
13412
13413
13414/** Opcode 0xb8. */
13415FNIEMOP_DEF(iemOp_eAX_Iv)
13416{
13417     IEMOP_MNEMONIC(mov_rAX_Iv, "mov rAX,Iv");
13418 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
13419}
13420
13421
13422/** Opcode 0xb9. */
13423FNIEMOP_DEF(iemOp_eCX_Iv)
13424{
13425     IEMOP_MNEMONIC(mov_rCX_Iv, "mov rCX,Iv");
13426 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13427}
13428
13429
13430/** Opcode 0xba. */
13431FNIEMOP_DEF(iemOp_eDX_Iv)
13432{
13433     IEMOP_MNEMONIC(mov_rDX_Iv, "mov rDX,Iv");
13434 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13435}
13436
13437
13438/** Opcode 0xbb. */
13439FNIEMOP_DEF(iemOp_eBX_Iv)
13440{
13441     IEMOP_MNEMONIC(mov_rBX_Iv, "mov rBX,Iv");
13442 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13443}
13444
13445
13446/** Opcode 0xbc. */
13447FNIEMOP_DEF(iemOp_eSP_Iv)
13448{
13449     IEMOP_MNEMONIC(mov_rSP_Iv, "mov rSP,Iv");
13450 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13451}
13452
13453
13454/** Opcode 0xbd. */
13455FNIEMOP_DEF(iemOp_eBP_Iv)
13456{
13457     IEMOP_MNEMONIC(mov_rBP_Iv, "mov rBP,Iv");
13458 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13459}
13460
13461
13462/** Opcode 0xbe. */
13463FNIEMOP_DEF(iemOp_eSI_Iv)
13464{
13465     IEMOP_MNEMONIC(mov_rSI_Iv, "mov rSI,Iv");
13466 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13467}
13468
13469
13470/** Opcode 0xbf. */
13471FNIEMOP_DEF(iemOp_eDI_Iv)
13472{
13473     IEMOP_MNEMONIC(mov_rDI_Iv, "mov rDI,Iv");
13474 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13475}
13476
13477
13478/** Opcode 0xc0. */
13479FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
13480{
13481 IEMOP_HLP_MIN_186();
13482 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13483 PCIEMOPSHIFTSIZES pImpl;
13484 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13485 {
13486 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
13487 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
13488 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
13489 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
13490 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
13491 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
13492 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
13493 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13494 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
13495 }
13496 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13497
13498 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13499 {
13500 /* register */
13501 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13503 IEM_MC_BEGIN(3, 0);
13504 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13505 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13506 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13507 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13508 IEM_MC_REF_EFLAGS(pEFlags);
13509 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13510 IEM_MC_ADVANCE_RIP();
13511 IEM_MC_END();
13512 }
13513 else
13514 {
13515 /* memory */
13516 IEM_MC_BEGIN(3, 2);
13517 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13518 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13519 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13520 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13521
13522 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13523 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13524 IEM_MC_ASSIGN(cShiftArg, cShift);
13525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13526 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13527 IEM_MC_FETCH_EFLAGS(EFlags);
13528 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13529
13530 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13531 IEM_MC_COMMIT_EFLAGS(EFlags);
13532 IEM_MC_ADVANCE_RIP();
13533 IEM_MC_END();
13534 }
13535 return VINF_SUCCESS;
13536}
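
/*
 * Illustrative sketch (not built): group 2 picks the operation from
 * ModRM.reg as decoded above, with /6 unassigned and raising #UD. A
 * standalone dispatch table with hypothetical names (a NULL entry stands
 * in for the invalid encoding; the shift count is masked to 5 bits like
 * the hardware does before operating on the 8-bit destination):
 */
#if 0
# include <stddef.h>
# include <stdint.h>
typedef uint8_t (*PFNSKETCHSHIFT8)(uint8_t bDst, uint8_t cShift);
static uint8_t sketchShl8(uint8_t bDst, uint8_t cShift) { return (uint8_t)((uint32_t)bDst << (cShift & 31)); }
static uint8_t sketchShr8(uint8_t bDst, uint8_t cShift) { return (uint8_t)((uint32_t)bDst >> (cShift & 31)); }
static PFNSKETCHSHIFT8 sketchGrp2Lookup(uint8_t bRm)
{
    static PFNSKETCHSHIFT8 const s_apfn[8] =
    { /*rol*/ NULL, /*ror*/ NULL, /*rcl*/ NULL, /*rcr*/ NULL, sketchShl8, sketchShr8, /* /6: #UD */ NULL, /*sar*/ NULL };
    return s_apfn[(bRm >> 3) & 7];
}
#endif
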
13537
13538
13539/** Opcode 0xc1. */
13540FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
13541{
13542 IEMOP_HLP_MIN_186();
13543 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13544 PCIEMOPSHIFTSIZES pImpl;
13545 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13546 {
13547 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
13548 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
13549 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
13550 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
13551 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
13552 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
13553 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
13554 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13555 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc may be stupid */
13556 }
13557 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13558
13559 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13560 {
13561 /* register */
13562 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13564 switch (pVCpu->iem.s.enmEffOpSize)
13565 {
13566 case IEMMODE_16BIT:
13567 IEM_MC_BEGIN(3, 0);
13568 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13569 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13570 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13571 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13572 IEM_MC_REF_EFLAGS(pEFlags);
13573 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13574 IEM_MC_ADVANCE_RIP();
13575 IEM_MC_END();
13576 return VINF_SUCCESS;
13577
13578 case IEMMODE_32BIT:
13579 IEM_MC_BEGIN(3, 0);
13580 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13581 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13582 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13583 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13584 IEM_MC_REF_EFLAGS(pEFlags);
13585 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13586 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13587 IEM_MC_ADVANCE_RIP();
13588 IEM_MC_END();
13589 return VINF_SUCCESS;
13590
13591 case IEMMODE_64BIT:
13592 IEM_MC_BEGIN(3, 0);
13593 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13594 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13595 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13596 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13597 IEM_MC_REF_EFLAGS(pEFlags);
13598 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13599 IEM_MC_ADVANCE_RIP();
13600 IEM_MC_END();
13601 return VINF_SUCCESS;
13602
13603 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13604 }
13605 }
13606 else
13607 {
13608 /* memory */
13609 switch (pVCpu->iem.s.enmEffOpSize)
13610 {
13611 case IEMMODE_16BIT:
13612 IEM_MC_BEGIN(3, 2);
13613 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13614 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13615 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13617
13618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13619 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13620 IEM_MC_ASSIGN(cShiftArg, cShift);
13621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13622 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13623 IEM_MC_FETCH_EFLAGS(EFlags);
13624 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13625
13626 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13627 IEM_MC_COMMIT_EFLAGS(EFlags);
13628 IEM_MC_ADVANCE_RIP();
13629 IEM_MC_END();
13630 return VINF_SUCCESS;
13631
13632 case IEMMODE_32BIT:
13633 IEM_MC_BEGIN(3, 2);
13634 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13635 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13636 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13638
13639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13640 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13641 IEM_MC_ASSIGN(cShiftArg, cShift);
13642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13643 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13644 IEM_MC_FETCH_EFLAGS(EFlags);
13645 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13646
13647 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13648 IEM_MC_COMMIT_EFLAGS(EFlags);
13649 IEM_MC_ADVANCE_RIP();
13650 IEM_MC_END();
13651 return VINF_SUCCESS;
13652
13653 case IEMMODE_64BIT:
13654 IEM_MC_BEGIN(3, 2);
13655 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13656 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13657 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13659
13660 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13661 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13662 IEM_MC_ASSIGN(cShiftArg, cShift);
13663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13664 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13665 IEM_MC_FETCH_EFLAGS(EFlags);
13666 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13667
13668 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13669 IEM_MC_COMMIT_EFLAGS(EFlags);
13670 IEM_MC_ADVANCE_RIP();
13671 IEM_MC_END();
13672 return VINF_SUCCESS;
13673
13674 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13675 }
13676 }
13677}
13678
13679
13680/** Opcode 0xc2. */
13681FNIEMOP_DEF(iemOp_retn_Iw)
13682{
13683 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
13684 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13686 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13687 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
13688}
13689
13690
13691/** Opcode 0xc3. */
13692FNIEMOP_DEF(iemOp_retn)
13693{
13694 IEMOP_MNEMONIC(retn, "retn");
13695 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13697 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
13698}
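
/*
 * Note on the two near-return forms above: in 64-bit mode near branches
 * default to a 64-bit operand size, hence IEMOP_HLP_DEFAULT_64BIT_OP_SIZE.
 * A rough sketch of what the deferred iemCImpl_retn presumably does, with
 * all canonical/limit checks omitted ('pop' is shorthand here, not a real
 * helper in this file):
 *
 * @code
 *    uNewIp = pop(operand size);   // 2, 4 or 8 bytes
 *    rSP   += u16Imm;              // release callee arguments; 0 for the 0xc3 form
 *    rIP    = uNewIp;
 * @endcode
 */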
13699
13700
13701/** Opcode 0xc4. */
13702FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
13703{
13704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13705 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
13706 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13707 {
13708 IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
13709 /* The LES instruction is invalid in 64-bit mode. In legacy and
13710 compatibility mode it is invalid with MOD=3.
13711 The use as a VEX prefix is made possible by assigning the inverted
13712 REX.R to the top MOD bit, and the top bit in the inverted register
13713 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
13714 code to accessing registers 0..7 in this VEX form. */
13715 /** @todo VEX: Just use new tables for it. */
13716 return IEMOP_RAISE_INVALID_OPCODE();
13717 }
13718 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
13719 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
13720}
13721
13722
13723/** Opcode 0xc5. */
13724FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
13725{
13726 /* The LDS instruction is invalid in 64-bit mode. In legacy and
13727 compatibility mode it is invalid with MOD=3.
13728 The use as a VEX prefix is made possible by assigning the inverted
13729 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
13730 outside of 64-bit mode. VEX is not available in real or v86 mode. */
13731 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13732 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
13733 {
13734 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
13735 {
13736 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
13737 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
13738 }
13739 IEMOP_HLP_NO_REAL_OR_V86_MODE();
13740 }
13741
13742 IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
13743 /** @todo Test when exactly the VEX conformance checks kick in during
13744 * instruction decoding and fetching (using \#PF). */
13745 uint8_t bVex1; IEM_OPCODE_GET_NEXT_U8(&bVex1);
13746 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
13747 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
13748#if 0 /* will make sense of this next week... */
13749 if ( !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
13750 &&
13751 )
13752 {
13753
13754 }
13755#endif
13756
13757 /** @todo VEX: Just use new tables for it. */
13758 return IEMOP_RAISE_INVALID_OPCODE();
13759}
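
/*
 * Note: the two payload bytes fetched above carry the standard 3-byte VEX
 * fields.  A rough sketch of how they would be taken apart once VEX decoding
 * is implemented (field layout per the x86 spec; illustrative only, not part
 * of the build):
 *
 * @code
 *    bool    const fRexR = !(bVex1 & 0x80);        // R - stored inverted
 *    bool    const fRexX = !(bVex1 & 0x40);        // X - stored inverted
 *    bool    const fRexB = !(bVex1 & 0x20);        // B - stored inverted
 *    uint8_t const idMap =   bVex1 & 0x1f;         // mmmmm: 1=0f, 2=0f38, 3=0f3a
 *    bool    const fRexW = RT_BOOL(bVex2 & 0x80);  // W
 *    uint8_t const iVvvv = (~bVex2 >> 3) & 0xf;    // vvvv - stored inverted
 *    bool    const f256  = RT_BOOL(bVex2 & 0x04);  // L: vector length
 *    uint8_t const iPp   =   bVex2 & 0x03;         // pp: 0=none, 1=66h, 2=f3h, 3=f2h
 * @endcode
 */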
13760
13761
13762/** Opcode 0xc6. */
13763FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
13764{
13765 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13766 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
13767 return IEMOP_RAISE_INVALID_OPCODE();
13768 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
13769
13770 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13771 {
13772 /* register access */
13773 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13775 IEM_MC_BEGIN(0, 0);
13776 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
13777 IEM_MC_ADVANCE_RIP();
13778 IEM_MC_END();
13779 }
13780 else
13781 {
13782 /* memory access. */
13783 IEM_MC_BEGIN(0, 1);
13784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13786 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13788 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
13789 IEM_MC_ADVANCE_RIP();
13790 IEM_MC_END();
13791 }
13792 return VINF_SUCCESS;
13793}
13794
13795
13796/** Opcode 0xc7. */
13797FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
13798{
13799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13800 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
13801 return IEMOP_RAISE_INVALID_OPCODE();
13802 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
13803
13804 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13805 {
13806 /* register access */
13807 switch (pVCpu->iem.s.enmEffOpSize)
13808 {
13809 case IEMMODE_16BIT:
13810 IEM_MC_BEGIN(0, 0);
13811 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13813 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
13814 IEM_MC_ADVANCE_RIP();
13815 IEM_MC_END();
13816 return VINF_SUCCESS;
13817
13818 case IEMMODE_32BIT:
13819 IEM_MC_BEGIN(0, 0);
13820 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13822 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
13823 IEM_MC_ADVANCE_RIP();
13824 IEM_MC_END();
13825 return VINF_SUCCESS;
13826
13827 case IEMMODE_64BIT:
13828 IEM_MC_BEGIN(0, 0);
13829 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13831 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
13832 IEM_MC_ADVANCE_RIP();
13833 IEM_MC_END();
13834 return VINF_SUCCESS;
13835
13836 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13837 }
13838 }
13839 else
13840 {
13841 /* memory access. */
13842 switch (pVCpu->iem.s.enmEffOpSize)
13843 {
13844 case IEMMODE_16BIT:
13845 IEM_MC_BEGIN(0, 1);
13846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13847 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
13848 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13850 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
13851 IEM_MC_ADVANCE_RIP();
13852 IEM_MC_END();
13853 return VINF_SUCCESS;
13854
13855 case IEMMODE_32BIT:
13856 IEM_MC_BEGIN(0, 1);
13857 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13858 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13859 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13861 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
13862 IEM_MC_ADVANCE_RIP();
13863 IEM_MC_END();
13864 return VINF_SUCCESS;
13865
13866 case IEMMODE_64BIT:
13867 IEM_MC_BEGIN(0, 1);
13868 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13870 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13872 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
13873 IEM_MC_ADVANCE_RIP();
13874 IEM_MC_END();
13875 return VINF_SUCCESS;
13876
13877 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13878 }
13879 }
13880}
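
/*
 * Note on the Iz immediate above: it follows the effective operand size,
 * except that there is no 64-bit immediate form - the 64-bit case fetches a
 * 32-bit immediate and sign-extends it, which is what
 * IEM_OPCODE_GET_NEXT_S32_SX_U64 does.  So 'mov qword [rax], -1' encodes
 * fine here, while a full 64-bit constant needs mov reg64,imm64 instead.
 */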
13881
13882
13883
13884
13885/** Opcode 0xc8. */
13886FNIEMOP_DEF(iemOp_enter_Iw_Ib)
13887{
13888 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
13889 IEMOP_HLP_MIN_186();
13890 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13891 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
13892 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
13893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13894 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
13895}
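
/*
 * Note: a rough sketch of the ENTER semantics the deferred iemCImpl_enter is
 * expected to implement, with the nesting-level frame-pointer copying and
 * all fault checks omitted ('push' is shorthand here; illustrative only):
 *
 * @code
 *    push(rBP);                    // save the caller's frame pointer
 *    uFrameBase = rSP;             // (display copying goes here when u8NestingLevel > 0)
 *    rBP  = uFrameBase;
 *    rSP -= cbFrame;               // allocate the local variable area
 * @endcode
 */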
13896
13897
13898/** Opcode 0xc9. */
13899FNIEMOP_DEF(iemOp_leave)
13900{
13901 IEMOP_MNEMONIC(leave, "leave");
13902 IEMOP_HLP_MIN_186();
13903 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13905 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
13906}
13907
13908
13909/** Opcode 0xca. */
13910FNIEMOP_DEF(iemOp_retf_Iw)
13911{
13912 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
13913 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13915 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13916 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
13917}
13918
13919
13920/** Opcode 0xcb. */
13921FNIEMOP_DEF(iemOp_retf)
13922{
13923 IEMOP_MNEMONIC(retf, "retf");
13924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13925 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13926 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
13927}
13928
13929
13930/** Opcode 0xcc. */
13931FNIEMOP_DEF(iemOp_int_3)
13932{
13933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13934 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
13935}
13936
13937
13938/** Opcode 0xcd. */
13939FNIEMOP_DEF(iemOp_int_Ib)
13940{
13941 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
13942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13943 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
13944}
13945
13946
13947/** Opcode 0xce. */
13948FNIEMOP_DEF(iemOp_into)
13949{
13950 IEMOP_MNEMONIC(into, "into");
13951 IEMOP_HLP_NO_64BIT();
13952
13953 IEM_MC_BEGIN(2, 0);
13954 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
13955 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
13956 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
13957 IEM_MC_END();
13958 return VINF_SUCCESS;
13959}
13960
13961
13962/** Opcode 0xcf. */
13963FNIEMOP_DEF(iemOp_iret)
13964{
13965 IEMOP_MNEMONIC(iret, "iret");
13966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13967 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
13968}
13969
13970
13971/** Opcode 0xd0. */
13972FNIEMOP_DEF(iemOp_Grp2_Eb_1)
13973{
13974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13975 PCIEMOPSHIFTSIZES pImpl;
13976 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13977 {
13978 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
13979 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
13980 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
13981 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
13982 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
13983 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
13984 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
13985 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13986 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
13987 }
13988 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13989
13990 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13991 {
13992 /* register */
13993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13994 IEM_MC_BEGIN(3, 0);
13995 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13996 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13997 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13998 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13999 IEM_MC_REF_EFLAGS(pEFlags);
14000 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
14001 IEM_MC_ADVANCE_RIP();
14002 IEM_MC_END();
14003 }
14004 else
14005 {
14006 /* memory */
14007 IEM_MC_BEGIN(3, 2);
14008 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
14009 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
14010 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14011 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14012
14013 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14015 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14016 IEM_MC_FETCH_EFLAGS(EFlags);
14017 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
14018
14019 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
14020 IEM_MC_COMMIT_EFLAGS(EFlags);
14021 IEM_MC_ADVANCE_RIP();
14022 IEM_MC_END();
14023 }
14024 return VINF_SUCCESS;
14025}
14026
14027
14028
14029/** Opcode 0xd1. */
14030FNIEMOP_DEF(iemOp_Grp2_Ev_1)
14031{
14032 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14033 PCIEMOPSHIFTSIZES pImpl;
14034 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14035 {
14036 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
14037 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
14038 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
14039 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
14040 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
14041 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
14042 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
14043 case 6: return IEMOP_RAISE_INVALID_OPCODE();
14044 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
14045 }
14046 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
14047
14048 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14049 {
14050 /* register */
14051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14052 switch (pVCpu->iem.s.enmEffOpSize)
14053 {
14054 case IEMMODE_16BIT:
14055 IEM_MC_BEGIN(3, 0);
14056 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14057 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
14058 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14059 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14060 IEM_MC_REF_EFLAGS(pEFlags);
14061 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
14062 IEM_MC_ADVANCE_RIP();
14063 IEM_MC_END();
14064 return VINF_SUCCESS;
14065
14066 case IEMMODE_32BIT:
14067 IEM_MC_BEGIN(3, 0);
14068 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14069 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
14070 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14071 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14072 IEM_MC_REF_EFLAGS(pEFlags);
14073 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14074 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
14075 IEM_MC_ADVANCE_RIP();
14076 IEM_MC_END();
14077 return VINF_SUCCESS;
14078
14079 case IEMMODE_64BIT:
14080 IEM_MC_BEGIN(3, 0);
14081 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14082 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
14083 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14084 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14085 IEM_MC_REF_EFLAGS(pEFlags);
14086 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14087 IEM_MC_ADVANCE_RIP();
14088 IEM_MC_END();
14089 return VINF_SUCCESS;
14090
14091 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14092 }
14093 }
14094 else
14095 {
14096 /* memory */
14097 switch (pVCpu->iem.s.enmEffOpSize)
14098 {
14099 case IEMMODE_16BIT:
14100 IEM_MC_BEGIN(3, 2);
14101 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14102 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
14103 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14105
14106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14108 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14109 IEM_MC_FETCH_EFLAGS(EFlags);
14110 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
14111
14112 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
14113 IEM_MC_COMMIT_EFLAGS(EFlags);
14114 IEM_MC_ADVANCE_RIP();
14115 IEM_MC_END();
14116 return VINF_SUCCESS;
14117
14118 case IEMMODE_32BIT:
14119 IEM_MC_BEGIN(3, 2);
14120 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14121 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
14122 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14124
14125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14127 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14128 IEM_MC_FETCH_EFLAGS(EFlags);
14129 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14130
14131 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
14132 IEM_MC_COMMIT_EFLAGS(EFlags);
14133 IEM_MC_ADVANCE_RIP();
14134 IEM_MC_END();
14135 return VINF_SUCCESS;
14136
14137 case IEMMODE_64BIT:
14138 IEM_MC_BEGIN(3, 2);
14139 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14140 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
14141 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14142 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14143
14144 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14146 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14147 IEM_MC_FETCH_EFLAGS(EFlags);
14148 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14149
14150 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
14151 IEM_MC_COMMIT_EFLAGS(EFlags);
14152 IEM_MC_ADVANCE_RIP();
14153 IEM_MC_END();
14154 return VINF_SUCCESS;
14155
14156 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14157 }
14158 }
14159}
14160
14161
14162/** Opcode 0xd2. */
14163FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
14164{
14165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14166 PCIEMOPSHIFTSIZES pImpl;
14167 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14168 {
14169 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
14170 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
14171 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
14172 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
14173 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
14174 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
14175 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
14176 case 6: return IEMOP_RAISE_INVALID_OPCODE();
14177 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
14178 }
14179 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
14180
14181 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14182 {
14183 /* register */
14184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14185 IEM_MC_BEGIN(3, 0);
14186 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
14187 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14188 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14189 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14190 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14191 IEM_MC_REF_EFLAGS(pEFlags);
14192 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
14193 IEM_MC_ADVANCE_RIP();
14194 IEM_MC_END();
14195 }
14196 else
14197 {
14198 /* memory */
14199 IEM_MC_BEGIN(3, 2);
14200 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
14201 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14202 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14204
14205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14207 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14208 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14209 IEM_MC_FETCH_EFLAGS(EFlags);
14210 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
14211
14212 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
14213 IEM_MC_COMMIT_EFLAGS(EFlags);
14214 IEM_MC_ADVANCE_RIP();
14215 IEM_MC_END();
14216 }
14217 return VINF_SUCCESS;
14218}
14219
14220
14221/** Opcode 0xd3. */
14222FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
14223{
14224 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14225 PCIEMOPSHIFTSIZES pImpl;
14226 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14227 {
14228 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
14229 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
14230 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
14231 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
14232 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
14233 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
14234 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
14235 case 6: return IEMOP_RAISE_INVALID_OPCODE();
14236 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc may be stupid */
14237 }
14238 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
14239
14240 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14241 {
14242 /* register */
14243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14244 switch (pVCpu->iem.s.enmEffOpSize)
14245 {
14246 case IEMMODE_16BIT:
14247 IEM_MC_BEGIN(3, 0);
14248 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14249 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14250 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14251 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14252 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14253 IEM_MC_REF_EFLAGS(pEFlags);
14254 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
14255 IEM_MC_ADVANCE_RIP();
14256 IEM_MC_END();
14257 return VINF_SUCCESS;
14258
14259 case IEMMODE_32BIT:
14260 IEM_MC_BEGIN(3, 0);
14261 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14262 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14263 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14264 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14265 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14266 IEM_MC_REF_EFLAGS(pEFlags);
14267 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14268 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
14269 IEM_MC_ADVANCE_RIP();
14270 IEM_MC_END();
14271 return VINF_SUCCESS;
14272
14273 case IEMMODE_64BIT:
14274 IEM_MC_BEGIN(3, 0);
14275 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14276 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14277 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14278 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14279 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14280 IEM_MC_REF_EFLAGS(pEFlags);
14281 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14282 IEM_MC_ADVANCE_RIP();
14283 IEM_MC_END();
14284 return VINF_SUCCESS;
14285
14286 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14287 }
14288 }
14289 else
14290 {
14291 /* memory */
14292 switch (pVCpu->iem.s.enmEffOpSize)
14293 {
14294 case IEMMODE_16BIT:
14295 IEM_MC_BEGIN(3, 2);
14296 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14297 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14298 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14300
14301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14303 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14304 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14305 IEM_MC_FETCH_EFLAGS(EFlags);
14306 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
14307
14308 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
14309 IEM_MC_COMMIT_EFLAGS(EFlags);
14310 IEM_MC_ADVANCE_RIP();
14311 IEM_MC_END();
14312 return VINF_SUCCESS;
14313
14314 case IEMMODE_32BIT:
14315 IEM_MC_BEGIN(3, 2);
14316 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14317 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14318 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14319 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14320
14321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14323 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14324 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14325 IEM_MC_FETCH_EFLAGS(EFlags);
14326 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14327
14328 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
14329 IEM_MC_COMMIT_EFLAGS(EFlags);
14330 IEM_MC_ADVANCE_RIP();
14331 IEM_MC_END();
14332 return VINF_SUCCESS;
14333
14334 case IEMMODE_64BIT:
14335 IEM_MC_BEGIN(3, 2);
14336 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14337 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14338 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14340
14341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14343 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14344 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14345 IEM_MC_FETCH_EFLAGS(EFlags);
14346 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14347
14348 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
14349 IEM_MC_COMMIT_EFLAGS(EFlags);
14350 IEM_MC_ADVANCE_RIP();
14351 IEM_MC_END();
14352 return VINF_SUCCESS;
14353
14354 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14355 }
14356 }
14357}
14358
14359/** Opcode 0xd4. */
14360FNIEMOP_DEF(iemOp_aam_Ib)
14361{
14362 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
14363 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14365 IEMOP_HLP_NO_64BIT();
14366 if (!bImm)
14367 return IEMOP_RAISE_DIVIDE_ERROR();
14368 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
14369}
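
/*
 * Note: the explicit !bImm check above is what makes 'aam 0' raise #DE
 * before any register is touched.  A rough sketch of the documented AAM
 * semantics the deferred iemCImpl_aam presumably implements (flag updates
 * omitted; al/ah denote the guest registers; illustrative only):
 *
 * @code
 *    uint8_t const uAl = al;
 *    ah = uAl / bImm;              // quotient; bImm is typically 10 (0x0a)
 *    al = uAl % bImm;              // remainder
 * @endcode
 */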
14370
14371
14372/** Opcode 0xd5. */
14373FNIEMOP_DEF(iemOp_aad_Ib)
14374{
14375 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
14376 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14378 IEMOP_HLP_NO_64BIT();
14379 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
14380}
14381
14382
14383/** Opcode 0xd6. */
14384FNIEMOP_DEF(iemOp_salc)
14385{
14386 IEMOP_MNEMONIC(salc, "salc");
14387 IEMOP_HLP_MIN_286(); /* (undocumented at the time) */
14389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14390 IEMOP_HLP_NO_64BIT();
14391
14392 IEM_MC_BEGIN(0, 0);
14393 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
14394 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
14395 } IEM_MC_ELSE() {
14396 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
14397 } IEM_MC_ENDIF();
14398 IEM_MC_ADVANCE_RIP();
14399 IEM_MC_END();
14400 return VINF_SUCCESS;
14401}
14402
14403
14404/** Opcode 0xd7. */
14405FNIEMOP_DEF(iemOp_xlat)
14406{
14407 IEMOP_MNEMONIC(xlat, "xlat");
14408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14409 switch (pVCpu->iem.s.enmEffAddrMode)
14410 {
14411 case IEMMODE_16BIT:
14412 IEM_MC_BEGIN(2, 0);
14413 IEM_MC_LOCAL(uint8_t, u8Tmp);
14414 IEM_MC_LOCAL(uint16_t, u16Addr);
14415 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
14416 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
14417 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
14418 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14419 IEM_MC_ADVANCE_RIP();
14420 IEM_MC_END();
14421 return VINF_SUCCESS;
14422
14423 case IEMMODE_32BIT:
14424 IEM_MC_BEGIN(2, 0);
14425 IEM_MC_LOCAL(uint8_t, u8Tmp);
14426 IEM_MC_LOCAL(uint32_t, u32Addr);
14427 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
14428 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
14429 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
14430 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14431 IEM_MC_ADVANCE_RIP();
14432 IEM_MC_END();
14433 return VINF_SUCCESS;
14434
14435 case IEMMODE_64BIT:
14436 IEM_MC_BEGIN(2, 0);
14437 IEM_MC_LOCAL(uint8_t, u8Tmp);
14438 IEM_MC_LOCAL(uint64_t, u64Addr);
14439 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
14440 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
14441 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
14442 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14443 IEM_MC_ADVANCE_RIP();
14444 IEM_MC_END();
14445 return VINF_SUCCESS;
14446
14447 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14448 }
14449}
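
/*
 * Note: all three XLAT variants above compute the same thing,
 * AL = [iEffSeg : rBX + ZeroExtend(AL)], and differ only in how much of rBX
 * takes part per the effective address size; e.g. the 16-bit case boils
 * down to u16Addr = (uint16_t)(bx + al).
 */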
14450
14451
14452/**
14453 * Common worker for FPU instructions working on ST0 and STn, and storing the
14454 * result in ST0.
14455 *
14456 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14457 */
14458FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14459{
14460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14461
14462 IEM_MC_BEGIN(3, 1);
14463 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14464 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14465 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14466 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14467
14468 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14469 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14470 IEM_MC_PREPARE_FPU_USAGE();
14471 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14472 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14473 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14474 IEM_MC_ELSE()
14475 IEM_MC_FPU_STACK_UNDERFLOW(0);
14476 IEM_MC_ENDIF();
14477 IEM_MC_ADVANCE_RIP();
14478
14479 IEM_MC_END();
14480 return VINF_SUCCESS;
14481}
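
/*
 * Note: this worker realizes the ST0 = op(ST0, STn) pattern shared by the
 * 0xd8 register forms below: both operands must be non-empty, the result
 * replaces ST0, and an empty register takes the stack-underflow path
 * (broadly: store the indefinite QNaN when the invalid-operation exception
 * is masked, pend #MF otherwise).
 */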
14482
14483
14484/**
14485 * Common worker for FPU instructions working on ST0 and STn, and only affecting
14486 * flags.
14487 *
14488 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14489 */
14490FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14491{
14492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14493
14494 IEM_MC_BEGIN(3, 1);
14495 IEM_MC_LOCAL(uint16_t, u16Fsw);
14496 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14497 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14498 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14499
14500 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14501 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14502 IEM_MC_PREPARE_FPU_USAGE();
14503 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14504 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14505 IEM_MC_UPDATE_FSW(u16Fsw);
14506 IEM_MC_ELSE()
14507 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14508 IEM_MC_ENDIF();
14509 IEM_MC_ADVANCE_RIP();
14510
14511 IEM_MC_END();
14512 return VINF_SUCCESS;
14513}
14514
14515
14516/**
14517 * Common worker for FPU instructions working on ST0 and STn, only affecting
14518 * flags, and popping when done.
14519 *
14520 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14521 */
14522FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14523{
14524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14525
14526 IEM_MC_BEGIN(3, 1);
14527 IEM_MC_LOCAL(uint16_t, u16Fsw);
14528 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14529 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14530 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14531
14532 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14533 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14534 IEM_MC_PREPARE_FPU_USAGE();
14535 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14536 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14537 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14538 IEM_MC_ELSE()
14539 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
14540 IEM_MC_ENDIF();
14541 IEM_MC_ADVANCE_RIP();
14542
14543 IEM_MC_END();
14544 return VINF_SUCCESS;
14545}
14546
14547
14548/** Opcode 0xd8 11/0. */
14549FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
14550{
14551 IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
14552 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
14553}
14554
14555
14556/** Opcode 0xd8 11/1. */
14557FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
14558{
14559 IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
14560 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
14561}
14562
14563
14564/** Opcode 0xd8 11/2. */
14565FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
14566{
14567 IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
14568 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
14569}
14570
14571
14572/** Opcode 0xd8 11/3. */
14573FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
14574{
14575 IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
14576 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
14577}
14578
14579
14580/** Opcode 0xd8 11/4. */
14581FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
14582{
14583 IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
14584 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
14585}
14586
14587
14588/** Opcode 0xd8 11/5. */
14589FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
14590{
14591 IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
14592 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
14593}
14594
14595
14596/** Opcode 0xd8 11/6. */
14597FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
14598{
14599 IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
14600 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
14601}
14602
14603
14604/** Opcode 0xd8 11/7. */
14605FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
14606{
14607 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
14608 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
14609}
14610
14611
14612/**
14613 * Common worker for FPU instructions working on ST0 and an m32r, and storing
14614 * the result in ST0.
14615 *
14616 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14617 */
14618FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
14619{
14620 IEM_MC_BEGIN(3, 3);
14621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14622 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14623 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14624 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14625 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14626 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14627
14628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14630
14631 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14632 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14633 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14634
14635 IEM_MC_PREPARE_FPU_USAGE();
14636 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14637 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
14638 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14639 IEM_MC_ELSE()
14640 IEM_MC_FPU_STACK_UNDERFLOW(0);
14641 IEM_MC_ENDIF();
14642 IEM_MC_ADVANCE_RIP();
14643
14644 IEM_MC_END();
14645 return VINF_SUCCESS;
14646}
14647
14648
14649/** Opcode 0xd8 !11/0. */
14650FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
14651{
14652 IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
14653 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
14654}
14655
14656
14657/** Opcode 0xd8 !11/1. */
14658FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
14659{
14660 IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
14661 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
14662}
14663
14664
14665/** Opcode 0xd8 !11/2. */
14666FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
14667{
14668 IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
14669
14670 IEM_MC_BEGIN(3, 3);
14671 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14672 IEM_MC_LOCAL(uint16_t, u16Fsw);
14673 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14674 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14675 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14676 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14677
14678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14680
14681 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14682 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14683 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14684
14685 IEM_MC_PREPARE_FPU_USAGE();
14686 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14687 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
14688 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14689 IEM_MC_ELSE()
14690 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14691 IEM_MC_ENDIF();
14692 IEM_MC_ADVANCE_RIP();
14693
14694 IEM_MC_END();
14695 return VINF_SUCCESS;
14696}
14697
14698
14699/** Opcode 0xd8 !11/3. */
14700FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
14701{
14702 IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
14703
14704 IEM_MC_BEGIN(3, 3);
14705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14706 IEM_MC_LOCAL(uint16_t, u16Fsw);
14707 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14708 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14709 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14710 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14711
14712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14714
14715 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14716 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14717 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14718
14719 IEM_MC_PREPARE_FPU_USAGE();
14720 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14721 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
14722 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14723 IEM_MC_ELSE()
14724 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14725 IEM_MC_ENDIF();
14726 IEM_MC_ADVANCE_RIP();
14727
14728 IEM_MC_END();
14729 return VINF_SUCCESS;
14730}
14731
14732
14733/** Opcode 0xd8 !11/4. */
14734FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
14735{
14736 IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
14737 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
14738}
14739
14740
14741/** Opcode 0xd8 !11/5. */
14742FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
14743{
14744 IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
14745 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
14746}
14747
14748
14749/** Opcode 0xd8 !11/6. */
14750FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
14751{
14752 IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
14753 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
14754}
14755
14756
14757/** Opcode 0xd8 !11/7. */
14758FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
14759{
14760 IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
14761 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
14762}
14763
14764
14765/** Opcode 0xd8. */
14766FNIEMOP_DEF(iemOp_EscF0)
14767{
14768 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14769 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
14770
14771 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14772 {
14773 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14774 {
14775 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
14776 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
14777 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
14778 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
14779 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
14780 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
14781 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
14782 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
14783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14784 }
14785 }
14786 else
14787 {
14788 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14789 {
14790 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
14791 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
14792 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
14793 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
14794 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
14795 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
14796 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
14797 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
14798 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14799 }
14800 }
14801}
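
/*
 * Note: the uFpuOpcode value recorded at the top of iemOp_EscF0 is the
 * 11-bit x87 opcode - the low three bits of the escape byte combined with
 * the ModRM byte - which lands in the FOP register when the FPU state is
 * stored.  Sketch of the composition (equivalent to the RT_MAKE_U16 above):
 *
 * @code
 *    uint16_t const uFop = (uint16_t)(((0xd8 & 0x7) << 8) | bRm);
 * @endcode
 */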
14802
14803
14804/** Opcode 0xd9 /0 mem32real
14805 * @sa iemOp_fld_m64r */
14806FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
14807{
14808 IEMOP_MNEMONIC(fld_m32r, "fld m32r");
14809
14810 IEM_MC_BEGIN(2, 3);
14811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14812 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14813 IEM_MC_LOCAL(RTFLOAT32U, r32Val);
14814 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14815 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
14816
14817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14819
14820 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14821 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14822 IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14823
14824 IEM_MC_PREPARE_FPU_USAGE();
14825 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14826 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
14827 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14828 IEM_MC_ELSE()
14829 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14830 IEM_MC_ENDIF();
14831 IEM_MC_ADVANCE_RIP();
14832
14833 IEM_MC_END();
14834 return VINF_SUCCESS;
14835}
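
/*
 * Note: the IEM_MC_IF_FPUREG_IS_EMPTY(7) check above is relative to the
 * current TOP.  A push decrements TOP first, so relative register 7, i.e.
 * (TOP - 1) & 7, is exactly the slot the loaded value will land in; if it
 * is occupied, the load turns into a stack push overflow instead.
 */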
14836
14837
14838/** Opcode 0xd9 !11/2 mem32real */
14839FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
14840{
14841 IEMOP_MNEMONIC(fst_m32r, "fst m32r");
14842 IEM_MC_BEGIN(3, 2);
14843 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14844 IEM_MC_LOCAL(uint16_t, u16Fsw);
14845 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14846 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
14847 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14848
14849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14851 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14852 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14853
14854 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14855 IEM_MC_PREPARE_FPU_USAGE();
14856 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14857 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
14858 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14859 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14860 IEM_MC_ELSE()
14861 IEM_MC_IF_FCW_IM()
14862 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
14863 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
14864 IEM_MC_ENDIF();
14865 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14866 IEM_MC_ENDIF();
14867 IEM_MC_ADVANCE_RIP();
14868
14869 IEM_MC_END();
14870 return VINF_SUCCESS;
14871}
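
/*
 * Note: the IEM_MC_IF_FCW_IM() path above gives the masked stack-underflow
 * response for stores: with the invalid-operation exception masked the
 * destination is still written - with the negative QNaN 'indefinite' value -
 * before the FSW bookkeeping, whereas unmasked underflow leaves memory
 * untouched and pends #MF instead.
 */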
14872
14873
14874/** Opcode 0xd9 !11/3 */
14875FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
14876{
14877 IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
14878 IEM_MC_BEGIN(3, 2);
14879 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14880 IEM_MC_LOCAL(uint16_t, u16Fsw);
14881 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14882 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
14883 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14884
14885 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14887 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14888 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14889
14890 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14891 IEM_MC_PREPARE_FPU_USAGE();
14892 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14893 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
14894 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14895 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14896 IEM_MC_ELSE()
14897 IEM_MC_IF_FCW_IM()
14898 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
14899 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
14900 IEM_MC_ENDIF();
14901 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14902 IEM_MC_ENDIF();
14903 IEM_MC_ADVANCE_RIP();
14904
14905 IEM_MC_END();
14906 return VINF_SUCCESS;
14907}
14908
14909
14910/** Opcode 0xd9 !11/4 */
14911FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
14912{
14913 IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
14914 IEM_MC_BEGIN(3, 0);
14915 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14916 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14917 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
14918 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14920 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14921 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14922 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14923 IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
14924 IEM_MC_END();
14925 return VINF_SUCCESS;
14926}
14927
14928
14929/** Opcode 0xd9 !11/5 */
14930FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
14931{
14932 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
14933 IEM_MC_BEGIN(1, 1);
14934 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14935 IEM_MC_ARG(uint16_t, u16Fcw, 0);
14936 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14938 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14939 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14940 IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14941 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fcw);
14942 IEM_MC_END();
14943 return VINF_SUCCESS;
14944}
14945
14946
14947/** Opcode 0xd9 !11/6 */
14948FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
14949{
14950 IEMOP_MNEMONIC(fnstenv, "fnstenv m14/m28byte");
14951 IEM_MC_BEGIN(3, 0);
14952 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14953 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14954 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
14955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14957 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14958 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14959 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14960 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
14961 IEM_MC_END();
14962 return VINF_SUCCESS;
14963}
14964
14965
14966/** Opcode 0xd9 !11/7 */
14967FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
14968{
14969 IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
14970 IEM_MC_BEGIN(2, 0);
14971 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14972 IEM_MC_LOCAL(uint16_t, u16Fcw);
14973 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14975 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14976 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14977 IEM_MC_FETCH_FCW(u16Fcw);
14978 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
14979 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined; we leave them unmodified. */
14980 IEM_MC_END();
14981 return VINF_SUCCESS;
14982}
14983
14984
14985/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
14986FNIEMOP_DEF(iemOp_fnop)
14987{
14988 IEMOP_MNEMONIC(fnop, "fnop");
14989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14990
14991 IEM_MC_BEGIN(0, 0);
14992 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14993 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14994 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14995 /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
14996 * an Intel optimization. Investigate. */
14997 IEM_MC_UPDATE_FPU_OPCODE_IP();
14998 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined; we leave them unmodified. */
14999 IEM_MC_END();
15000 return VINF_SUCCESS;
15001}
15002
15003
15004/** Opcode 0xd9 11/0 stN */
15005FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
15006{
15007 IEMOP_MNEMONIC(fld_stN, "fld stN");
15008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15009
15010 /** @todo Testcase: Check whether this raises \#MF. Intel's documentation
15011 * does not mention it; AMD's indicates that it does. */
15012 IEM_MC_BEGIN(0, 2);
15013 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
15014 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15015 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15016 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15017
15018 IEM_MC_PREPARE_FPU_USAGE();
15019 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
15020 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
15021 IEM_MC_PUSH_FPU_RESULT(FpuRes);
15022 IEM_MC_ELSE()
15023 IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
15024 IEM_MC_ENDIF();
15025
15026 IEM_MC_ADVANCE_RIP();
15027 IEM_MC_END();
15028
15029 return VINF_SUCCESS;
15030}
15031
15032
15033/** Opcode 0xd9 11/3 stN */
15034FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
15035{
15036 IEMOP_MNEMONIC(fxch_stN, "fxch stN");
15037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15038
15039 /** @todo Testcase: Check whether this raises \#MF. Intel's documentation
15040 * does not mention it; AMD's indicates that it does. */
15041 IEM_MC_BEGIN(1, 3);
15042 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
15043 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
15044 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15045 IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
15046 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15047 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15048
15049 IEM_MC_PREPARE_FPU_USAGE();
15050 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
15051 IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
15052 IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
15053 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15054 IEM_MC_ELSE()
15055 IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
15056 IEM_MC_ENDIF();
15057
15058 IEM_MC_ADVANCE_RIP();
15059 IEM_MC_END();
15060
15061 return VINF_SUCCESS;
15062}
15063
15064
15065/** Opcode 0xd9 11/4, 0xdd 11/2. */
15066FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
15067{
15068 IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
15069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15070
15071 /* fstp st0, st0 is frequently used as an official alternative to the undocumented 'ffreep st0'. */
15072 uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
15073 if (!iDstReg)
15074 {
15075 IEM_MC_BEGIN(0, 1);
15076 IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
15077 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15078 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15079
15080 IEM_MC_PREPARE_FPU_USAGE();
15081 IEM_MC_IF_FPUREG_NOT_EMPTY(0)
15082 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
15083 IEM_MC_ELSE()
15084 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
15085 IEM_MC_ENDIF();
15086
15087 IEM_MC_ADVANCE_RIP();
15088 IEM_MC_END();
15089 }
15090 else
15091 {
15092 IEM_MC_BEGIN(0, 2);
15093 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
15094 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15095 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15096 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15097
15098 IEM_MC_PREPARE_FPU_USAGE();
15099 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15100 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
15101 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
15102 IEM_MC_ELSE()
15103 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
15104 IEM_MC_ENDIF();
15105
15106 IEM_MC_ADVANCE_RIP();
15107 IEM_MC_END();
15108 }
15109 return VINF_SUCCESS;
15110}
15111
15112
15113/**
15114 * Common worker for FPU instructions working on ST0 and replacing it with
15115 * the result, i.e. unary operators.
15116 *
15117 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15118 */
15119FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
15120{
15121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15122
15123 IEM_MC_BEGIN(2, 1);
15124 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15125 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15126 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
15127
15128 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15129 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15130 IEM_MC_PREPARE_FPU_USAGE();
15131 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15132 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
15133 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15134 IEM_MC_ELSE()
15135 IEM_MC_FPU_STACK_UNDERFLOW(0);
15136 IEM_MC_ENDIF();
15137 IEM_MC_ADVANCE_RIP();
15138
15139 IEM_MC_END();
15140 return VINF_SUCCESS;
15141}
15142
15143
15144/** Opcode 0xd9 0xe0. */
15145FNIEMOP_DEF(iemOp_fchs)
15146{
15147 IEMOP_MNEMONIC(fchs_st0, "fchs st0");
15148 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
15149}
15150
15151
15152/** Opcode 0xd9 0xe1. */
15153FNIEMOP_DEF(iemOp_fabs)
15154{
15155 IEMOP_MNEMONIC(fabs_st0, "fabs st0");
15156 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
15157}
15158
15159
15160/**
15161 * Common worker for FPU instructions working on ST0 and only returning FSW.
15162 *
15163 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15164 */
15165FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
15166{
15167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15168
15169 IEM_MC_BEGIN(2, 1);
15170 IEM_MC_LOCAL(uint16_t, u16Fsw);
15171 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15172 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
15173
15174 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15175 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15176 IEM_MC_PREPARE_FPU_USAGE();
15177 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15178 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
15179 IEM_MC_UPDATE_FSW(u16Fsw);
15180 IEM_MC_ELSE()
15181 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
15182 IEM_MC_ENDIF();
15183 IEM_MC_ADVANCE_RIP();
15184
15185 IEM_MC_END();
15186 return VINF_SUCCESS;
15187}
15188
15189
15190/** Opcode 0xd9 0xe4. */
15191FNIEMOP_DEF(iemOp_ftst)
15192{
15193 IEMOP_MNEMONIC(ftst_st0, "ftst st0");
15194 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
15195}
15196
15197
15198/** Opcode 0xd9 0xe5. */
15199FNIEMOP_DEF(iemOp_fxam)
15200{
15201 IEMOP_MNEMONIC(fxam_st0, "fxam st0");
15202 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
15203}
15204
15205
15206/**
15207 * Common worker for FPU instructions pushing a constant onto the FPU stack.
15208 *
15209 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15210 */
15211FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
15212{
15213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15214
15215 IEM_MC_BEGIN(1, 1);
15216 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15217 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15218
15219 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15220 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15221 IEM_MC_PREPARE_FPU_USAGE();
15222 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15223 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
15224 IEM_MC_PUSH_FPU_RESULT(FpuRes);
15225 IEM_MC_ELSE()
15226 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
15227 IEM_MC_ENDIF();
15228 IEM_MC_ADVANCE_RIP();
15229
15230 IEM_MC_END();
15231 return VINF_SUCCESS;
15232}
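
/*
 * Why IEM_MC_IF_FPUREG_IS_EMPTY(7): a push decrements TOP first, so the
 * register about to become the new ST(0) is the current ST(7), i.e.
 *
 *      uNewTop = (uTop - 1) & 7;
 *
 * If that register is in use, the push overflows the stack and
 * IEM_MC_FPU_STACK_PUSH_OVERFLOW does the C1/#IS bookkeeping instead.
 */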
15233
15234
15235/** Opcode 0xd9 0xe8. */
15236FNIEMOP_DEF(iemOp_fld1)
15237{
15238 IEMOP_MNEMONIC(fld1, "fld1");
15239 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
15240}
15241
15242
15243/** Opcode 0xd9 0xe9. */
15244FNIEMOP_DEF(iemOp_fldl2t)
15245{
15246 IEMOP_MNEMONIC(fldl2t, "fldl2t");
15247 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
15248}
15249
15250
15251/** Opcode 0xd9 0xea. */
15252FNIEMOP_DEF(iemOp_fldl2e)
15253{
15254 IEMOP_MNEMONIC(fldl2e, "fldl2e");
15255 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
15256}
15257

15258/** Opcode 0xd9 0xeb. */
15259FNIEMOP_DEF(iemOp_fldpi)
15260{
15261 IEMOP_MNEMONIC(fldpi, "fldpi");
15262 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
15263}
15264
15265
15266/** Opcode 0xd9 0xec. */
15267FNIEMOP_DEF(iemOp_fldlg2)
15268{
15269 IEMOP_MNEMONIC(fldlg2, "fldlg2");
15270 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
15271}
15272

15273/** Opcode 0xd9 0xed. */
15274FNIEMOP_DEF(iemOp_fldln2)
15275{
15276 IEMOP_MNEMONIC(fldln2, "fldln2");
15277 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
15278}
15279
15280
15281/** Opcode 0xd9 0xee. */
15282FNIEMOP_DEF(iemOp_fldz)
15283{
15284 IEMOP_MNEMONIC(fldz, "fldz");
15285 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
15286}
15287
15288
15289/** Opcode 0xd9 0xf0. */
15290FNIEMOP_DEF(iemOp_f2xm1)
15291{
15292 IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
15293 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
15294}
15295
15296
15297/**
15298 * Common worker for FPU instructions working on STn and ST0, storing the result
15299 * in STn, and popping the stack unless IE, DE or ZE was raised.
15300 *
15301 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15302 */
15303FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15304{
15305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15306
15307 IEM_MC_BEGIN(3, 1);
15308 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15309 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15310 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15311 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15312
15313 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15314 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15315
15316 IEM_MC_PREPARE_FPU_USAGE();
15317 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15318 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15319 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
15320 IEM_MC_ELSE()
15321 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
15322 IEM_MC_ENDIF();
15323 IEM_MC_ADVANCE_RIP();
15324
15325 IEM_MC_END();
15326 return VINF_SUCCESS;
15327}
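
/*
 * Stack effect of the pop variant, using FYL2X (bRm = 1, i.e. ST(1)) as
 * the worked example:
 *
 *      before:  ST(0) = x, ST(1) = y
 *      store:   ST(1) = y * log2(x)
 *      pop:     the product becomes the new ST(0)
 */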
15328
15329
15330/** Opcode 0xd9 0xf1. */
15331FNIEMOP_DEF(iemOp_fyl2x)
15332{
15333 IEMOP_MNEMONIC(fyl2x_st1_st0, "fyl2x st1,st0");
15334 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
15335}
15336
15337
15338/**
15339 * Common worker for FPU instructions working on ST0 and having two outputs, one
15340 * replacing ST0 and one pushed onto the stack.
15341 *
15342 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15343 */
15344FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
15345{
15346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15347
15348 IEM_MC_BEGIN(2, 1);
15349 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
15350 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
15351 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
15352
15353 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15354 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15355 IEM_MC_PREPARE_FPU_USAGE();
15356 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15357 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
15358 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
15359 IEM_MC_ELSE()
15360 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
15361 IEM_MC_ENDIF();
15362 IEM_MC_ADVANCE_RIP();
15363
15364 IEM_MC_END();
15365 return VINF_SUCCESS;
15366}
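
/*
 * The two-output workers fill an IEMFPURESULTTWO, which (modulo the exact
 * member names) carries two 80-bit values plus the FSW.  FPTAN is the
 * classic example:
 *
 *      ST(0) = tan(ST(0))      - first output replaces ST(0)
 *      push 1.0                - second output becomes the new ST(0)
 *
 * leaving the tangent in ST(1) with 1.0 on top of the stack.
 */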
15367
15368
15369/** Opcode 0xd9 0xf2. */
15370FNIEMOP_DEF(iemOp_fptan)
15371{
15372 IEMOP_MNEMONIC(fptan_st0, "fptan st0");
15373 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
15374}
15375
15376
15377/** Opcode 0xd9 0xf3. */
15378FNIEMOP_DEF(iemOp_fpatan)
15379{
15380 IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
15381 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
15382}
15383
15384
15385/** Opcode 0xd9 0xf4. */
15386FNIEMOP_DEF(iemOp_fxtract)
15387{
15388 IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
15389 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
15390}
15391
15392
15393/** Opcode 0xd9 0xf5. */
15394FNIEMOP_DEF(iemOp_fprem1)
15395{
15396 IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
15397 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
15398}
15399
15400
15401/** Opcode 0xd9 0xf6. */
15402FNIEMOP_DEF(iemOp_fdecstp)
15403{
15404 IEMOP_MNEMONIC(fdecstp, "fdecstp");
15405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15406 /* Note! C0, C2 and C3 are documented as undefined; we clear them. */
15407 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
15408 * FINCSTP and FDECSTP. */
15409
15410 IEM_MC_BEGIN(0,0);
15411
15412 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15413 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15414
15415 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15416 IEM_MC_FPU_STACK_DEC_TOP();
15417 IEM_MC_UPDATE_FSW_CONST(0);
15418
15419 IEM_MC_ADVANCE_RIP();
15420 IEM_MC_END();
15421 return VINF_SUCCESS;
15422}
15423
15424
15425/** Opcode 0xd9 0xf7. */
15426FNIEMOP_DEF(iemOp_fincstp)
15427{
15428 IEMOP_MNEMONIC(fincstp, "fincstp");
15429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15430 /* Note! C0, C2 and C3 are documented as undefined; we clear them. */
15431 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
15432 * FINCSTP and FDECSTP. */
15433
15434 IEM_MC_BEGIN(0,0);
15435
15436 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15437 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15438
15439 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15440 IEM_MC_FPU_STACK_INC_TOP();
15441 IEM_MC_UPDATE_FSW_CONST(0);
15442
15443 IEM_MC_ADVANCE_RIP();
15444 IEM_MC_END();
15445 return VINF_SUCCESS;
15446}
15447
15448
15449/** Opcode 0xd9 0xf8. */
15450FNIEMOP_DEF(iemOp_fprem)
15451{
15452 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
15453 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
15454}
15455
15456
15457/** Opcode 0xd9 0xf9. */
15458FNIEMOP_DEF(iemOp_fyl2xp1)
15459{
15460 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
15461 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
15462}
15463
15464
15465/** Opcode 0xd9 0xfa. */
15466FNIEMOP_DEF(iemOp_fsqrt)
15467{
15468 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
15469 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
15470}
15471
15472
15473/** Opcode 0xd9 0xfb. */
15474FNIEMOP_DEF(iemOp_fsincos)
15475{
15476 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
15477 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
15478}
15479
15480
15481/** Opcode 0xd9 0xfc. */
15482FNIEMOP_DEF(iemOp_frndint)
15483{
15484 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
15485 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
15486}
15487
15488
15489/** Opcode 0xd9 0xfd. */
15490FNIEMOP_DEF(iemOp_fscale)
15491{
15492 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
15493 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
15494}
15495
15496
15497/** Opcode 0xd9 0xfe. */
15498FNIEMOP_DEF(iemOp_fsin)
15499{
15500 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
15501 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
15502}
15503
15504
15505/** Opcode 0xd9 0xff. */
15506FNIEMOP_DEF(iemOp_fcos)
15507{
15508 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
15509 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
15510}
15511
15512
15513/** Used by iemOp_EscF1. */
15514IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
15515{
15516 /* 0xe0 */ iemOp_fchs,
15517 /* 0xe1 */ iemOp_fabs,
15518 /* 0xe2 */ iemOp_Invalid,
15519 /* 0xe3 */ iemOp_Invalid,
15520 /* 0xe4 */ iemOp_ftst,
15521 /* 0xe5 */ iemOp_fxam,
15522 /* 0xe6 */ iemOp_Invalid,
15523 /* 0xe7 */ iemOp_Invalid,
15524 /* 0xe8 */ iemOp_fld1,
15525 /* 0xe9 */ iemOp_fldl2t,
15526 /* 0xea */ iemOp_fldl2e,
15527 /* 0xeb */ iemOp_fldpi,
15528 /* 0xec */ iemOp_fldlg2,
15529 /* 0xed */ iemOp_fldln2,
15530 /* 0xee */ iemOp_fldz,
15531 /* 0xef */ iemOp_Invalid,
15532 /* 0xf0 */ iemOp_f2xm1,
15533 /* 0xf1 */ iemOp_fyl2x,
15534 /* 0xf2 */ iemOp_fptan,
15535 /* 0xf3 */ iemOp_fpatan,
15536 /* 0xf4 */ iemOp_fxtract,
15537 /* 0xf5 */ iemOp_fprem1,
15538 /* 0xf6 */ iemOp_fdecstp,
15539 /* 0xf7 */ iemOp_fincstp,
15540 /* 0xf8 */ iemOp_fprem,
15541 /* 0xf9 */ iemOp_fyl2xp1,
15542 /* 0xfa */ iemOp_fsqrt,
15543 /* 0xfb */ iemOp_fsincos,
15544 /* 0xfc */ iemOp_frndint,
15545 /* 0xfd */ iemOp_fscale,
15546 /* 0xfe */ iemOp_fsin,
15547 /* 0xff */ iemOp_fcos
15548};
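
/*
 * The table is indexed by bRm - 0xe0.  The mod=3 dispatch below only gets
 * here for reg fields 4 thru 7, which is exactly the 0xe0..0xff byte
 * range; e.g. bRm = 0xfe (fsin) yields index 0x1e = 30.
 */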
15549
15550
15551/** Opcode 0xd9. */
15552FNIEMOP_DEF(iemOp_EscF1)
15553{
15554 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15555 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
15556
15557 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15558 {
15559 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15560 {
15561 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
15562 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
15563 case 2:
15564 if (bRm == 0xd0)
15565 return FNIEMOP_CALL(iemOp_fnop);
15566 return IEMOP_RAISE_INVALID_OPCODE();
15567 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
15568 case 4:
15569 case 5:
15570 case 6:
15571 case 7:
15572 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
15573 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
15574 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15575 }
15576 }
15577 else
15578 {
15579 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15580 {
15581 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
15582 case 1: return IEMOP_RAISE_INVALID_OPCODE();
15583 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
15584 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
15585 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
15586 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
15587 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
15588 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
15589 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15590 }
15591 }
15592}
15593
15594
15595/** Opcode 0xda 11/0. */
15596FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
15597{
15598 IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
15599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15600
15601 IEM_MC_BEGIN(0, 1);
15602 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15603
15604 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15605 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15606
15607 IEM_MC_PREPARE_FPU_USAGE();
15608 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15609 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
15610 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15611 IEM_MC_ENDIF();
15612 IEM_MC_UPDATE_FPU_OPCODE_IP();
15613 IEM_MC_ELSE()
15614 IEM_MC_FPU_STACK_UNDERFLOW(0);
15615 IEM_MC_ENDIF();
15616 IEM_MC_ADVANCE_RIP();
15617
15618 IEM_MC_END();
15619 return VINF_SUCCESS;
15620}
15621
15622
15623/** Opcode 0xda 11/1. */
15624FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
15625{
15626 IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
15627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15628
15629 IEM_MC_BEGIN(0, 1);
15630 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15631
15632 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15633 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15634
15635 IEM_MC_PREPARE_FPU_USAGE();
15636 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15637 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
15638 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15639 IEM_MC_ENDIF();
15640 IEM_MC_UPDATE_FPU_OPCODE_IP();
15641 IEM_MC_ELSE()
15642 IEM_MC_FPU_STACK_UNDERFLOW(0);
15643 IEM_MC_ENDIF();
15644 IEM_MC_ADVANCE_RIP();
15645
15646 IEM_MC_END();
15647 return VINF_SUCCESS;
15648}
15649
15650
15651/** Opcode 0xda 11/2. */
15652FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
15653{
15654 IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
15655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15656
15657 IEM_MC_BEGIN(0, 1);
15658 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15659
15660 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15661 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15662
15663 IEM_MC_PREPARE_FPU_USAGE();
15664 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15665 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
15666 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15667 IEM_MC_ENDIF();
15668 IEM_MC_UPDATE_FPU_OPCODE_IP();
15669 IEM_MC_ELSE()
15670 IEM_MC_FPU_STACK_UNDERFLOW(0);
15671 IEM_MC_ENDIF();
15672 IEM_MC_ADVANCE_RIP();
15673
15674 IEM_MC_END();
15675 return VINF_SUCCESS;
15676}
15677
15678
15679/** Opcode 0xda 11/3. */
15680FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
15681{
15682 IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
15683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15684
15685 IEM_MC_BEGIN(0, 1);
15686 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15687
15688 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15689 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15690
15691 IEM_MC_PREPARE_FPU_USAGE();
15692 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15693 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
15694 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15695 IEM_MC_ENDIF();
15696 IEM_MC_UPDATE_FPU_OPCODE_IP();
15697 IEM_MC_ELSE()
15698 IEM_MC_FPU_STACK_UNDERFLOW(0);
15699 IEM_MC_ENDIF();
15700 IEM_MC_ADVANCE_RIP();
15701
15702 IEM_MC_END();
15703 return VINF_SUCCESS;
15704}
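
/*
 * Summary of the 0xda register forms above: the FCMOVcc conditions test
 * the integer EFLAGS, mirroring CMOVcc:
 *
 *      fcmovb   CF=1               fcmove   ZF=1
 *      fcmovbe  CF=1 or ZF=1       fcmovu   PF=1
 *
 * Note that an empty ST(0) or ST(i) takes the underflow path even when the
 * condition is false and no move would have taken place.
 */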
15705
15706
15707/**
15708 * Common worker for FPU instructions working on ST0 and STn, only affecting
15709 * flags, and popping twice when done.
15710 *
15711 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15712 */
15713FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
15714{
15715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15716
15717 IEM_MC_BEGIN(3, 1);
15718 IEM_MC_LOCAL(uint16_t, u16Fsw);
15719 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15720 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15721 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15722
15723 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15724 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15725
15726 IEM_MC_PREPARE_FPU_USAGE();
15727 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
15728 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
15729 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
15730 IEM_MC_ELSE()
15731 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
15732 IEM_MC_ENDIF();
15733 IEM_MC_ADVANCE_RIP();
15734
15735 IEM_MC_END();
15736 return VINF_SUCCESS;
15737}
15738
15739
15740/** Opcode 0xda 0xe9. */
15741FNIEMOP_DEF(iemOp_fucompp)
15742{
15743 IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
15744 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
15745}
15746
15747
15748/**
15749 * Common worker for FPU instructions working on ST0 and an m32i, and storing
15750 * the result in ST0.
15751 *
15752 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15753 */
15754FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
15755{
15756 IEM_MC_BEGIN(3, 3);
15757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15758 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15759 IEM_MC_LOCAL(int32_t, i32Val2);
15760 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15761 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15762 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15763
15764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15766
15767 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15768 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15769 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15770
15771 IEM_MC_PREPARE_FPU_USAGE();
15772 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15773 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
15774 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15775 IEM_MC_ELSE()
15776 IEM_MC_FPU_STACK_UNDERFLOW(0);
15777 IEM_MC_ENDIF();
15778 IEM_MC_ADVANCE_RIP();
15779
15780 IEM_MC_END();
15781 return VINF_SUCCESS;
15782}
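
/*
 * Decode-order detail in the memory-operand workers: the effective address
 * is calculated (IEM_MC_CALC_RM_EFF_ADDR) before the done-decoding call,
 * because the SIB and displacement bytes are still being fetched at that
 * point; only afterwards can decoding be declared complete and the lock
 * prefix check be made.
 */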
15783
15784
15785/** Opcode 0xda !11/0. */
15786FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
15787{
15788 IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
15789 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
15790}
15791
15792
15793/** Opcode 0xda !11/1. */
15794FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
15795{
15796 IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
15797 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
15798}
15799
15800
15801/** Opcode 0xda !11/2. */
15802FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
15803{
15804 IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
15805
15806 IEM_MC_BEGIN(3, 3);
15807 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15808 IEM_MC_LOCAL(uint16_t, u16Fsw);
15809 IEM_MC_LOCAL(int32_t, i32Val2);
15810 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15811 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15812 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15813
15814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15816
15817 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15818 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15819 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15820
15821 IEM_MC_PREPARE_FPU_USAGE();
15822 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15823 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
15824 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15825 IEM_MC_ELSE()
15826 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15827 IEM_MC_ENDIF();
15828 IEM_MC_ADVANCE_RIP();
15829
15830 IEM_MC_END();
15831 return VINF_SUCCESS;
15832}
15833
15834
15835/** Opcode 0xda !11/3. */
15836FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
15837{
15838 IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
15839
15840 IEM_MC_BEGIN(3, 3);
15841 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15842 IEM_MC_LOCAL(uint16_t, u16Fsw);
15843 IEM_MC_LOCAL(int32_t, i32Val2);
15844 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15845 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15846 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15847
15848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15850
15851 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15852 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15853 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15854
15855 IEM_MC_PREPARE_FPU_USAGE();
15856 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15857 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
15858 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15859 IEM_MC_ELSE()
15860 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15861 IEM_MC_ENDIF();
15862 IEM_MC_ADVANCE_RIP();
15863
15864 IEM_MC_END();
15865 return VINF_SUCCESS;
15866}
15867
15868
15869/** Opcode 0xda !11/4. */
15870FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
15871{
15872 IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
15873 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
15874}
15875
15876
15877/** Opcode 0xda !11/5. */
15878FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
15879{
15880 IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
15881 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
15882}
15883
15884
15885/** Opcode 0xda !11/6. */
15886FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
15887{
15888 IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
15889 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
15890}
15891
15892
15893/** Opcode 0xda !11/7. */
15894FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
15895{
15896 IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
15897 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
15898}
15899
15900
15901/** Opcode 0xda. */
15902FNIEMOP_DEF(iemOp_EscF2)
15903{
15904 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15905 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
15906 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15907 {
15908 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15909 {
15910 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
15911 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
15912 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
15913 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
15914 case 4: return IEMOP_RAISE_INVALID_OPCODE();
15915 case 5:
15916 if (bRm == 0xe9)
15917 return FNIEMOP_CALL(iemOp_fucompp);
15918 return IEMOP_RAISE_INVALID_OPCODE();
15919 case 6: return IEMOP_RAISE_INVALID_OPCODE();
15920 case 7: return IEMOP_RAISE_INVALID_OPCODE();
15921 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15922 }
15923 }
15924 else
15925 {
15926 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15927 {
15928 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
15929 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
15930 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
15931 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
15932 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
15933 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
15934 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
15935 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
15936 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15937 }
15938 }
15939}
15940
15941
15942/** Opcode 0xdb !11/0. */
15943FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
15944{
15945 IEMOP_MNEMONIC(fild_m32i, "fild m32i");
15946
15947 IEM_MC_BEGIN(2, 3);
15948 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15949 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15950 IEM_MC_LOCAL(int32_t, i32Val);
15951 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15952 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
15953
15954 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15956
15957 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15958 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15959 IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15960
15961 IEM_MC_PREPARE_FPU_USAGE();
15962 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15963 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
15964 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15965 IEM_MC_ELSE()
15966 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15967 IEM_MC_ENDIF();
15968 IEM_MC_ADVANCE_RIP();
15969
15970 IEM_MC_END();
15971 return VINF_SUCCESS;
15972}
15973
15974
15975/** Opcode 0xdb !11/1. */
15976FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
15977{
15978 IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
15979 IEM_MC_BEGIN(3, 2);
15980 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15981 IEM_MC_LOCAL(uint16_t, u16Fsw);
15982 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15983 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15984 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15985
15986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15988 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15989 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15990
15991 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15992 IEM_MC_PREPARE_FPU_USAGE();
15993 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15994 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15995 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15996 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15997 IEM_MC_ELSE()
15998 IEM_MC_IF_FCW_IM()
15999 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
16000 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
16001 IEM_MC_ENDIF();
16002 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16003 IEM_MC_ENDIF();
16004 IEM_MC_ADVANCE_RIP();
16005
16006 IEM_MC_END();
16007 return VINF_SUCCESS;
16008}
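
/*
 * On the masked-exception fallback above: when the conversion cannot be
 * represented and FCW.IM is set, the "integer indefinite" value is stored
 * instead - INT32_MIN (0x80000000) here, INT64_MIN for the m64 variant.
 * FISTTP itself (an SSE3 addition) always truncates towards zero,
 * irrespective of the rounding control bits in FCW.
 */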
16009
16010
16011/** Opcode 0xdb !11/2. */
16012FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
16013{
16014 IEMOP_MNEMONIC(fist_m32i, "fist m32i");
16015 IEM_MC_BEGIN(3, 2);
16016 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16017 IEM_MC_LOCAL(uint16_t, u16Fsw);
16018 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16019 IEM_MC_ARG(int32_t *, pi32Dst, 1);
16020 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16021
16022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16024 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16025 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16026
16027 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16028 IEM_MC_PREPARE_FPU_USAGE();
16029 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16030 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
16031 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
16032 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16033 IEM_MC_ELSE()
16034 IEM_MC_IF_FCW_IM()
16035 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
16036 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
16037 IEM_MC_ENDIF();
16038 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16039 IEM_MC_ENDIF();
16040 IEM_MC_ADVANCE_RIP();
16041
16042 IEM_MC_END();
16043 return VINF_SUCCESS;
16044}
16045
16046
16047/** Opcode 0xdb !11/3. */
16048FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
16049{
16050 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
16051 IEM_MC_BEGIN(3, 2);
16052 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16053 IEM_MC_LOCAL(uint16_t, u16Fsw);
16054 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16055 IEM_MC_ARG(int32_t *, pi32Dst, 1);
16056 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16057
16058 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16060 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16061 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16062
16063 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16064 IEM_MC_PREPARE_FPU_USAGE();
16065 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16066 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
16067 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
16068 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16069 IEM_MC_ELSE()
16070 IEM_MC_IF_FCW_IM()
16071 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
16072 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
16073 IEM_MC_ENDIF();
16074 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16075 IEM_MC_ENDIF();
16076 IEM_MC_ADVANCE_RIP();
16077
16078 IEM_MC_END();
16079 return VINF_SUCCESS;
16080}
16081
16082
16083/** Opcode 0xdb !11/5. */
16084FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
16085{
16086 IEMOP_MNEMONIC(fld_m80r, "fld m80r");
16087
16088 IEM_MC_BEGIN(2, 3);
16089 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16090 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16091 IEM_MC_LOCAL(RTFLOAT80U, r80Val);
16092 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16093 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
16094
16095 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16097
16098 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16099 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16100 IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16101
16102 IEM_MC_PREPARE_FPU_USAGE();
16103 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16104 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
16105 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16106 IEM_MC_ELSE()
16107 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16108 IEM_MC_ENDIF();
16109 IEM_MC_ADVANCE_RIP();
16110
16111 IEM_MC_END();
16112 return VINF_SUCCESS;
16113}
16114
16115
16116/** Opcode 0xdb !11/7. */
16117FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
16118{
16119 IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
16120 IEM_MC_BEGIN(3, 2);
16121 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16122 IEM_MC_LOCAL(uint16_t, u16Fsw);
16123 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16124 IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
16125 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16126
16127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16129 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16130 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16131
16132 IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16133 IEM_MC_PREPARE_FPU_USAGE();
16134 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16135 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
16136 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
16137 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16138 IEM_MC_ELSE()
16139 IEM_MC_IF_FCW_IM()
16140 IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
16141 IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
16142 IEM_MC_ENDIF();
16143 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16144 IEM_MC_ENDIF();
16145 IEM_MC_ADVANCE_RIP();
16146
16147 IEM_MC_END();
16148 return VINF_SUCCESS;
16149}
16150
16151
16152/** Opcode 0xdb 11/0. */
16153FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
16154{
16155 IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
16156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16157
16158 IEM_MC_BEGIN(0, 1);
16159 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
16160
16161 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16162 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16163
16164 IEM_MC_PREPARE_FPU_USAGE();
16165 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16166 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
16167 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16168 IEM_MC_ENDIF();
16169 IEM_MC_UPDATE_FPU_OPCODE_IP();
16170 IEM_MC_ELSE()
16171 IEM_MC_FPU_STACK_UNDERFLOW(0);
16172 IEM_MC_ENDIF();
16173 IEM_MC_ADVANCE_RIP();
16174
16175 IEM_MC_END();
16176 return VINF_SUCCESS;
16177}
16178
16179
16180/** Opcode 0xdb 11/1. */
16181FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
16182{
16183 IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
16184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16185
16186 IEM_MC_BEGIN(0, 1);
16187 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
16188
16189 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16190 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16191
16192 IEM_MC_PREPARE_FPU_USAGE();
16193 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16194 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
16195 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16196 IEM_MC_ENDIF();
16197 IEM_MC_UPDATE_FPU_OPCODE_IP();
16198 IEM_MC_ELSE()
16199 IEM_MC_FPU_STACK_UNDERFLOW(0);
16200 IEM_MC_ENDIF();
16201 IEM_MC_ADVANCE_RIP();
16202
16203 IEM_MC_END();
16204 return VINF_SUCCESS;
16205}
16206
16207
16208/** Opcode 0xdb 11/2. */
16209FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
16210{
16211 IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
16212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16213
16214 IEM_MC_BEGIN(0, 1);
16215 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
16216
16217 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16218 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16219
16220 IEM_MC_PREPARE_FPU_USAGE();
16221 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16222 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
16223 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16224 IEM_MC_ENDIF();
16225 IEM_MC_UPDATE_FPU_OPCODE_IP();
16226 IEM_MC_ELSE()
16227 IEM_MC_FPU_STACK_UNDERFLOW(0);
16228 IEM_MC_ENDIF();
16229 IEM_MC_ADVANCE_RIP();
16230
16231 IEM_MC_END();
16232 return VINF_SUCCESS;
16233}
16234
16235
16236/** Opcode 0xdb 11/3. */
16237FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
16238{
16239    IEMOP_MNEMONIC(fcmovnu_st0_stN, "fcmovnu st0,stN");
16240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16241
16242 IEM_MC_BEGIN(0, 1);
16243 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
16244
16245 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16246 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16247
16248 IEM_MC_PREPARE_FPU_USAGE();
16249 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16250 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
16251 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16252 IEM_MC_ENDIF();
16253 IEM_MC_UPDATE_FPU_OPCODE_IP();
16254 IEM_MC_ELSE()
16255 IEM_MC_FPU_STACK_UNDERFLOW(0);
16256 IEM_MC_ENDIF();
16257 IEM_MC_ADVANCE_RIP();
16258
16259 IEM_MC_END();
16260 return VINF_SUCCESS;
16261}
16262
16263
16264/** Opcode 0xdb 0xe0. */
16265FNIEMOP_DEF(iemOp_fneni)
16266{
16267 IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
16268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16269 IEM_MC_BEGIN(0,0);
16270 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16271 IEM_MC_ADVANCE_RIP();
16272 IEM_MC_END();
16273 return VINF_SUCCESS;
16274}
16275
16276
16277/** Opcode 0xdb 0xe1. */
16278FNIEMOP_DEF(iemOp_fndisi)
16279{
16280 IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
16281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16282 IEM_MC_BEGIN(0,0);
16283 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16284 IEM_MC_ADVANCE_RIP();
16285 IEM_MC_END();
16286 return VINF_SUCCESS;
16287}
16288
16289
16290/** Opcode 0xdb 0xe2. */
16291FNIEMOP_DEF(iemOp_fnclex)
16292{
16293 IEMOP_MNEMONIC(fnclex, "fnclex");
16294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16295
16296 IEM_MC_BEGIN(0,0);
16297 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16298 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16299 IEM_MC_CLEAR_FSW_EX();
16300 IEM_MC_ADVANCE_RIP();
16301 IEM_MC_END();
16302 return VINF_SUCCESS;
16303}
16304
16305
16306/** Opcode 0xdb 0xe3. */
16307FNIEMOP_DEF(iemOp_fninit)
16308{
16309 IEMOP_MNEMONIC(fninit, "fninit");
16310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16311 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
16312}
16313
16314
16315/** Opcode 0xdb 0xe4. */
16316FNIEMOP_DEF(iemOp_fnsetpm)
16317{
16318 IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
16319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16320 IEM_MC_BEGIN(0,0);
16321 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16322 IEM_MC_ADVANCE_RIP();
16323 IEM_MC_END();
16324 return VINF_SUCCESS;
16325}
16326
16327
16328/** Opcode 0xdb 0xe5. */
16329FNIEMOP_DEF(iemOp_frstpm)
16330{
16331 IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
16332#if 0 /* #UDs on newer CPUs */
16333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16334 IEM_MC_BEGIN(0,0);
16335 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16336 IEM_MC_ADVANCE_RIP();
16337 IEM_MC_END();
16338 return VINF_SUCCESS;
16339#else
16340 return IEMOP_RAISE_INVALID_OPCODE();
16341#endif
16342}
16343
16344
16345/** Opcode 0xdb 11/5. */
16346FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
16347{
16348 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
16349 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
16350}
16351
16352
16353/** Opcode 0xdb 11/6. */
16354FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
16355{
16356 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
16357 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
16358}
16359
16360
16361/** Opcode 0xdb. */
16362FNIEMOP_DEF(iemOp_EscF3)
16363{
16364 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16365 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
16366 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16367 {
16368 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16369 {
16370 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
16371 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
16372 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
16373            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnu_stN, bRm);
16374 case 4:
16375 switch (bRm)
16376 {
16377 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
16378 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
16379 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
16380 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
16381 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
16382 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
16383 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
16384 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
16385 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16386 }
16387 break;
16388 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
16389 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
16390 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16391 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16392 }
16393 }
16394 else
16395 {
16396 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16397 {
16398 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
16399 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
16400 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
16401 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
16402 case 4: return IEMOP_RAISE_INVALID_OPCODE();
16403 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
16404 case 6: return IEMOP_RAISE_INVALID_OPCODE();
16405 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
16406 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16407 }
16408 }
16409}
16410
16411
16412/**
16413 * Common worker for FPU instructions working on STn and ST0, and storing the
16414 * result in STn unless IE, DE or ZE was raised.
16415 *
16416 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16417 */
16418FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
16419{
16420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16421
16422 IEM_MC_BEGIN(3, 1);
16423 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16424 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16425 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16426 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
16427
16428 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16429 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16430
16431 IEM_MC_PREPARE_FPU_USAGE();
16432 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
16433 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
16434 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
16435 IEM_MC_ELSE()
16436 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
16437 IEM_MC_ENDIF();
16438 IEM_MC_ADVANCE_RIP();
16439
16440 IEM_MC_END();
16441 return VINF_SUCCESS;
16442}
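
/*
 * Operand order in the 0xdc register forms: pr80Value1 is ST(i) and
 * pr80Value2 is ST(0), with the result written back to ST(i), e.g.
 *
 *      fsub st3,st0   =>   ST(3) = ST(3) - ST(0)
 *
 * This is also why the /4,/5 and /6,/7 cases in the dispatch below are
 * swapped relative to the 0xd8 memory forms: 0xdc /4 is FSUBR while
 * 0xd8 /4 is FSUB.
 */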
16443
16444
16445/** Opcode 0xdc 11/0. */
16446FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
16447{
16448 IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
16449 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
16450}
16451
16452
16453/** Opcode 0xdc 11/1. */
16454FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
16455{
16456 IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
16457 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
16458}
16459
16460
16461/** Opcode 0xdc 11/4. */
16462FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
16463{
16464 IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
16465 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
16466}
16467
16468
16469/** Opcode 0xdc 11/5. */
16470FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
16471{
16472 IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
16473 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
16474}
16475
16476
16477/** Opcode 0xdc 11/6. */
16478FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
16479{
16480 IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
16481 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
16482}
16483
16484
16485/** Opcode 0xdc 11/7. */
16486FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
16487{
16488 IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
16489 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
16490}
16491
16492
16493/**
16494 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
16495 * memory operand, and storing the result in ST0.
16496 *
16497 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16498 */
16499FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnAImpl)
16500{
16501 IEM_MC_BEGIN(3, 3);
16502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16503 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16504 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
16505 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16506 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
16507 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
16508
16509 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16511 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16512 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16513
16514 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16515 IEM_MC_PREPARE_FPU_USAGE();
16516 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
16517        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Factor1, pr64Factor2);
16518 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16519 IEM_MC_ELSE()
16520 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16521 IEM_MC_ENDIF();
16522 IEM_MC_ADVANCE_RIP();
16523
16524 IEM_MC_END();
16525 return VINF_SUCCESS;
16526}
16527
16528
16529/** Opcode 0xdc !11/0. */
16530FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
16531{
16532 IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
16533 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
16534}
16535
16536
16537/** Opcode 0xdc !11/1. */
16538FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
16539{
16540 IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
16541 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
16542}
16543
16544
16545/** Opcode 0xdc !11/2. */
16546FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
16547{
16548 IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
16549
16550 IEM_MC_BEGIN(3, 3);
16551 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16552 IEM_MC_LOCAL(uint16_t, u16Fsw);
16553 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
16554 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16555 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16556 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
16557
16558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16560
16561 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16562 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16563 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16564
16565 IEM_MC_PREPARE_FPU_USAGE();
16566 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16567 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
16568 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16569 IEM_MC_ELSE()
16570 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16571 IEM_MC_ENDIF();
16572 IEM_MC_ADVANCE_RIP();
16573
16574 IEM_MC_END();
16575 return VINF_SUCCESS;
16576}
16577
16578
16579/** Opcode 0xdc !11/3. */
16580FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
16581{
16582 IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
16583
16584 IEM_MC_BEGIN(3, 3);
16585 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16586 IEM_MC_LOCAL(uint16_t, u16Fsw);
16587 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
16588 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16589 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16590 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
16591
16592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16594
16595 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16596 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16597 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16598
16599 IEM_MC_PREPARE_FPU_USAGE();
16600 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16601 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
16602 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16603 IEM_MC_ELSE()
16604 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16605 IEM_MC_ENDIF();
16606 IEM_MC_ADVANCE_RIP();
16607
16608 IEM_MC_END();
16609 return VINF_SUCCESS;
16610}
16611
16612
16613/** Opcode 0xdc !11/4. */
16614FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
16615{
16616 IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
16617 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
16618}
16619
16620
16621/** Opcode 0xdc !11/5. */
16622FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
16623{
16624 IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
16625 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
16626}
16627
16628
16629/** Opcode 0xdc !11/6. */
16630FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
16631{
16632 IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
16633 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
16634}
16635
16636
16637/** Opcode 0xdc !11/7. */
16638FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
16639{
16640 IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
16641 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
16642}
16643
16644
16645/** Opcode 0xdc. */
16646FNIEMOP_DEF(iemOp_EscF4)
16647{
16648 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16649 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
16650 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16651 {
16652 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16653 {
16654 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
16655 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
16656            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm); /* Marked reserved, Intel behavior is that of FCOM ST(i). */
16657            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, Intel behavior is that of FCOMP ST(i). */
16658 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
16659 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
16660 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
16661 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
16662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16663 }
16664 }
16665 else
16666 {
16667 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16668 {
16669 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
16670 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
16671 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
16672 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
16673 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
16674 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
16675 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
16676 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
16677 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16678 }
16679 }
16680}
16681
16682
16683/** Opcode 0xdd !11/0.
16684 * @sa iemOp_fld_m32r */
16685FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
16686{
16687 IEMOP_MNEMONIC(fld_m64r, "fld m64r");
16688
16689 IEM_MC_BEGIN(2, 3);
16690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16691 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16692 IEM_MC_LOCAL(RTFLOAT64U, r64Val);
16693 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16694 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);
16695
16696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16698 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16699 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16700
16701 IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16702 IEM_MC_PREPARE_FPU_USAGE();
16703 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16704 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
16705 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16706 IEM_MC_ELSE()
16707 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16708 IEM_MC_ENDIF();
16709 IEM_MC_ADVANCE_RIP();
16710
16711 IEM_MC_END();
16712 return VINF_SUCCESS;
16713}
16714
16715
16716/** Opcode 0xdd !11/1. */
16717FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
16718{
16719 IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
16720 IEM_MC_BEGIN(3, 2);
16721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16722 IEM_MC_LOCAL(uint16_t, u16Fsw);
16723 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16724 IEM_MC_ARG(int64_t *, pi64Dst, 1);
16725 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16726
16727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16729 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16730 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16731
16732 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16733 IEM_MC_PREPARE_FPU_USAGE();
16734 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16735 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
16736 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16737 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16738 IEM_MC_ELSE()
16739 IEM_MC_IF_FCW_IM()
16740 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
16741 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
16742 IEM_MC_ENDIF();
16743 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16744 IEM_MC_ENDIF();
16745 IEM_MC_ADVANCE_RIP();
16746
16747 IEM_MC_END();
16748 return VINF_SUCCESS;
16749}
16750
16751
16752/** Opcode 0xdd !11/2. */
16753FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
16754{
16755 IEMOP_MNEMONIC(fst_m64r, "fst m64r");
16756 IEM_MC_BEGIN(3, 2);
16757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16758 IEM_MC_LOCAL(uint16_t, u16Fsw);
16759 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16760 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
16761 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16762
16763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16765 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16766 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16767
16768 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16769 IEM_MC_PREPARE_FPU_USAGE();
16770 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16771 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
16772 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16773 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16774 IEM_MC_ELSE()
16775 IEM_MC_IF_FCW_IM()
16776 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
16777 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
16778 IEM_MC_ENDIF();
16779 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16780 IEM_MC_ENDIF();
16781 IEM_MC_ADVANCE_RIP();
16782
16783 IEM_MC_END();
16784 return VINF_SUCCESS;
16785}
16786
16787
16790/** Opcode 0xdd !11/3. */
16791FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
16792{
16793 IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
16794 IEM_MC_BEGIN(3, 2);
16795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16796 IEM_MC_LOCAL(uint16_t, u16Fsw);
16797 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16798 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
16799 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16800
16801 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16803 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16804 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16805
16806 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16807 IEM_MC_PREPARE_FPU_USAGE();
16808 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16809 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
16810 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16811 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16812 IEM_MC_ELSE()
16813 IEM_MC_IF_FCW_IM()
16814 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
16815 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
16816 IEM_MC_ENDIF();
16817 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16818 IEM_MC_ENDIF();
16819 IEM_MC_ADVANCE_RIP();
16820
16821 IEM_MC_END();
16822 return VINF_SUCCESS;
16823}
16824
16825
16826/** Opcode 0xdd !11/4. */
16827FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
16828{
16829 IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
16830 IEM_MC_BEGIN(3, 0);
16831 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
16832 IEM_MC_ARG(uint8_t, iEffSeg, 1);
16833 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
16834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16836 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16837 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16838 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
16839 IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
16840 IEM_MC_END();
16841 return VINF_SUCCESS;
16842}
16843
16844
16845/** Opcode 0xdd !11/6. */
16846FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
16847{
16848 IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
16849 IEM_MC_BEGIN(3, 0);
16850 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
16851 IEM_MC_ARG(uint8_t, iEffSeg, 1);
16852 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
16853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16855 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16856 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16857 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
16858 IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
16859 IEM_MC_END();
16860 return VINF_SUCCESS;
16861}
16862
16863
16864/** Opcode 0xdd !11/7. */
16865FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
16866{
16867 IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
16868
16869 IEM_MC_BEGIN(0, 2);
16870 IEM_MC_LOCAL(uint16_t, u16Tmp);
16871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16872
16873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16875 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
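    /* Note: no IEM_MC_MAYBE_RAISE_FPU_XCPT() here; FNSTSW is a no-wait
       instruction and must work even with FPU exceptions pending. */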
16876
16877 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16878 IEM_MC_FETCH_FSW(u16Tmp);
16879 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
16880 IEM_MC_ADVANCE_RIP();
16881
16882/** @todo Debug / drop a hint to the verifier that things may differ
16883 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
16884 * NT4SP1. (X86_FSW_PE) */
16885 IEM_MC_END();
16886 return VINF_SUCCESS;
16887}
16888
16889
16890/** Opcode 0xdd 11/0. */
16891FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
16892{
16893 IEMOP_MNEMONIC(ffree_stN, "ffree stN");
16894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16895 /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
16896 unmodified. */
16897
16898 IEM_MC_BEGIN(0, 0);
16899
16900 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16901 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16902
16903 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16904 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
16905 IEM_MC_UPDATE_FPU_OPCODE_IP();
16906
16907 IEM_MC_ADVANCE_RIP();
16908 IEM_MC_END();
16909 return VINF_SUCCESS;
16910}
16911
16912
16913/** Opcode 0xdd 11/2. */
16914FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
16915{
16916 IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
16917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16918
16919 IEM_MC_BEGIN(0, 2);
16920 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
16921 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16922 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16923 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16924
16925 IEM_MC_PREPARE_FPU_USAGE();
16926 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16927 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
16928 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
16929 IEM_MC_ELSE()
16930 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
16931 IEM_MC_ENDIF();
16932
16933 IEM_MC_ADVANCE_RIP();
16934 IEM_MC_END();
16935 return VINF_SUCCESS;
16936}
16937
16938
16939/** Opcode 0xdd 11/4. */
16940FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
16941{
16942 IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
16943 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
16944}
16945
16946
16947/** Opcode 0xdd 11/5. */
16948FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
16949{
16950 IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
16951 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
16952}
16953
16954
16955/** Opcode 0xdd. */
16956FNIEMOP_DEF(iemOp_EscF5)
16957{
16958 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16959 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
16960 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16961 {
16962 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16963 {
16964 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
16965 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, Intel behavior is that of FXCH ST(i). */
16966 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
16967 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
16968 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
16969 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
16970 case 6: return IEMOP_RAISE_INVALID_OPCODE();
16971 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16972 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16973 }
16974 }
16975 else
16976 {
16977 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16978 {
16979 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
16980 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
16981 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
16982 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
16983 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
16984 case 5: return IEMOP_RAISE_INVALID_OPCODE();
16985 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
16986 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
16987 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16988 }
16989 }
16990}
16991
16992
16993/** Opcode 0xde 11/0. */
16994FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
16995{
16996 IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
16997 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
16998}
16999
17000
17001/** Opcode 0xde 11/1. */
17002FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
17003{
17004 IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
17005 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
17006}
17007
17008
17009/** Opcode 0xde 0xd9. */
17010FNIEMOP_DEF(iemOp_fcompp)
17011{
17012 IEMOP_MNEMONIC(fcompp, "fcompp");
17013 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
17014}
17015
17016
17017/** Opcode 0xde 11/4. */
17018FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
17019{
17020 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
17021 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
17022}
17023
17024
17025/** Opcode 0xde 11/5. */
17026FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
17027{
17028 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
17029 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
17030}
17031
17032
17033/** Opcode 0xde 11/6. */
17034FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
17035{
17036 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
17037 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
17038}
17039
17040
17041/** Opcode 0xde 11/7. */
17042FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
17043{
17044 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
17045 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
17046}
17047
17048
17049/**
17050 * Common worker for FPU instructions working on ST0 and an m16i, and storing
17051 * the result in ST0.
17052 *
17053 * @param bRm The ModR/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
17054 */
17055FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
17056{
17057 IEM_MC_BEGIN(3, 3);
17058 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17059 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
17060 IEM_MC_LOCAL(int16_t, i16Val2);
17061 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
17062 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
17063 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
17064
17065 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17067
17068 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17069 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17070 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17071
17072 IEM_MC_PREPARE_FPU_USAGE();
17073 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
17074 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
17075 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
17076 IEM_MC_ELSE()
17077 IEM_MC_FPU_STACK_UNDERFLOW(0);
17078 IEM_MC_ENDIF();
17079 IEM_MC_ADVANCE_RIP();
17080
17081 IEM_MC_END();
17082 return VINF_SUCCESS;
17083}
17084
17085
17086/** Opcode 0xde !11/0. */
17087FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
17088{
17089 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
17090 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
17091}
17092
17093
17094/** Opcode 0xde !11/1. */
17095FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
17096{
17097 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
17098 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
17099}
17100
17101
17102/** Opcode 0xde !11/2. */
17103FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
17104{
17105 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
17106
17107 IEM_MC_BEGIN(3, 3);
17108 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17109 IEM_MC_LOCAL(uint16_t, u16Fsw);
17110 IEM_MC_LOCAL(int16_t, i16Val2);
17111 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17112 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
17113 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
17114
17115 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17117
17118 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17119 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17120 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17121
17122 IEM_MC_PREPARE_FPU_USAGE();
17123 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
17124 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
17125 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17126 IEM_MC_ELSE()
17127 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17128 IEM_MC_ENDIF();
17129 IEM_MC_ADVANCE_RIP();
17130
17131 IEM_MC_END();
17132 return VINF_SUCCESS;
17133}
17134
17135
17136/** Opcode 0xde !11/3. */
17137FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
17138{
17139 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
17140
17141 IEM_MC_BEGIN(3, 3);
17142 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17143 IEM_MC_LOCAL(uint16_t, u16Fsw);
17144 IEM_MC_LOCAL(int16_t, i16Val2);
17145 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17146 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
17147 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
17148
17149 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17151
17152 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17153 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17154 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17155
17156 IEM_MC_PREPARE_FPU_USAGE();
17157 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
17158 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
17159 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17160 IEM_MC_ELSE()
17161 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17162 IEM_MC_ENDIF();
17163 IEM_MC_ADVANCE_RIP();
17164
17165 IEM_MC_END();
17166 return VINF_SUCCESS;
17167}
17168
17169
17170/** Opcode 0xde !11/4. */
17171FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
17172{
17173 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
17174 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
17175}
17176
17177
17178/** Opcode 0xde !11/5. */
17179FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
17180{
17181 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
17182 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
17183}
17184
17185
17186/** Opcode 0xde !11/6. */
17187FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
17188{
17189 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
17190 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
17191}
17192
17193
17194/** Opcode 0xde !11/7. */
17195FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
17196{
17197 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
17198 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
17199}
17200
17201
17202/** Opcode 0xde. */
17203FNIEMOP_DEF(iemOp_EscF6)
17204{
17205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17206 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
17207 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17208 {
17209 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17210 {
17211 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
17212 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
17213 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
17214 case 3: if (bRm == 0xd9)
17215 return FNIEMOP_CALL(iemOp_fcompp);
17216 return IEMOP_RAISE_INVALID_OPCODE();
17217 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
17218 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
17219 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
17220 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
17221 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17222 }
17223 }
17224 else
17225 {
17226 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17227 {
17228 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
17229 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
17230 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
17231 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
17232 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
17233 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
17234 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
17235 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
17236 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17237 }
17238 }
17239}
17240
17241
17242/** Opcode 0xdf 11/0.
17243 * Undocumented instruction, assumed to work like ffree + fincstp. */
17244FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
17245{
17246 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
17247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17248
17249 IEM_MC_BEGIN(0, 0);
17250
17251 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17252 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17253
17254 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
17255 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
17256 IEM_MC_FPU_STACK_INC_TOP();
17257 IEM_MC_UPDATE_FPU_OPCODE_IP();
17258
17259 IEM_MC_ADVANCE_RIP();
17260 IEM_MC_END();
17261 return VINF_SUCCESS;
17262}
17263
17264
17265/** Opcode 0xdf 0xe0. */
17266FNIEMOP_DEF(iemOp_fnstsw_ax)
17267{
17268 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
17269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17270
17271 IEM_MC_BEGIN(0, 1);
17272 IEM_MC_LOCAL(uint16_t, u16Tmp);
17273 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17274 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
17275 IEM_MC_FETCH_FSW(u16Tmp);
17276 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
17277 IEM_MC_ADVANCE_RIP();
17278 IEM_MC_END();
17279 return VINF_SUCCESS;
17280}
17281
17282
17283/** Opcode 0xdf 11/5. */
17284FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
17285{
17286 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
17287 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
17288}
17289
17290
17291/** Opcode 0xdf 11/6. */
17292FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
17293{
17294 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
17295 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
17296}
17297
17298
17299/** Opcode 0xdf !11/0. */
17300FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
17301{
17302 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
17303
17304 IEM_MC_BEGIN(2, 3);
17305 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17306 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
17307 IEM_MC_LOCAL(int16_t, i16Val);
17308 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
17309 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
17310
17311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17313
17314 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17315 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17316 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17317
17318 IEM_MC_PREPARE_FPU_USAGE();
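    /* FILD pushes its result, which decrements TOP; the register currently
       addressed as ST(7) must therefore be empty or the push overflows. */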
17319 IEM_MC_IF_FPUREG_IS_EMPTY(7)
17320 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
17321 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17322 IEM_MC_ELSE()
17323 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17324 IEM_MC_ENDIF();
17325 IEM_MC_ADVANCE_RIP();
17326
17327 IEM_MC_END();
17328 return VINF_SUCCESS;
17329}
17330
17331
17332/** Opcode 0xdf !11/1. */
17333FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
17334{
17335 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
17336 IEM_MC_BEGIN(3, 2);
17337 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17338 IEM_MC_LOCAL(uint16_t, u16Fsw);
17339 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17340 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17341 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17342
17343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17345 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17346 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17347
17348 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17349 IEM_MC_PREPARE_FPU_USAGE();
17350 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17351 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17352 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17353 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17354 IEM_MC_ELSE()
17355 IEM_MC_IF_FCW_IM()
17356 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17357 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17358 IEM_MC_ENDIF();
17359 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17360 IEM_MC_ENDIF();
17361 IEM_MC_ADVANCE_RIP();
17362
17363 IEM_MC_END();
17364 return VINF_SUCCESS;
17365}
17366
17367
17368/** Opcode 0xdf !11/2. */
17369FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
17370{
17371 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
17372 IEM_MC_BEGIN(3, 2);
17373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17374 IEM_MC_LOCAL(uint16_t, u16Fsw);
17375 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17376 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17377 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17378
17379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17381 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17382 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17383
17384 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17385 IEM_MC_PREPARE_FPU_USAGE();
17386 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17387 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17388 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17389 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17390 IEM_MC_ELSE()
17391 IEM_MC_IF_FCW_IM()
17392 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17393 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17394 IEM_MC_ENDIF();
17395 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17396 IEM_MC_ENDIF();
17397 IEM_MC_ADVANCE_RIP();
17398
17399 IEM_MC_END();
17400 return VINF_SUCCESS;
17401}
17402
17403
17404/** Opcode 0xdf !11/3. */
17405FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
17406{
17407 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
17408 IEM_MC_BEGIN(3, 2);
17409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17410 IEM_MC_LOCAL(uint16_t, u16Fsw);
17411 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17412 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17413 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17414
17415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17417 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17418 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17419
17420 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17421 IEM_MC_PREPARE_FPU_USAGE();
17422 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17423 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17424 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17425 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17426 IEM_MC_ELSE()
17427 IEM_MC_IF_FCW_IM()
17428 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17429 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17430 IEM_MC_ENDIF();
17431 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17432 IEM_MC_ENDIF();
17433 IEM_MC_ADVANCE_RIP();
17434
17435 IEM_MC_END();
17436 return VINF_SUCCESS;
17437}
17438
17439
17440/** Opcode 0xdf !11/4. */
17441FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
17442
17443
17444/** Opcode 0xdf !11/5. */
17445FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
17446{
17447 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
17448
17449 IEM_MC_BEGIN(2, 3);
17450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17451 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
17452 IEM_MC_LOCAL(int64_t, i64Val);
17453 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
17454 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
17455
17456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17458
17459 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17460 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17461 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17462
17463 IEM_MC_PREPARE_FPU_USAGE();
17464 IEM_MC_IF_FPUREG_IS_EMPTY(7)
17465 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
17466 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17467 IEM_MC_ELSE()
17468 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17469 IEM_MC_ENDIF();
17470 IEM_MC_ADVANCE_RIP();
17471
17472 IEM_MC_END();
17473 return VINF_SUCCESS;
17474}
17475
17476
17477/** Opcode 0xdf !11/6. */
17478FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
17479
17480
17481/** Opcode 0xdf !11/7. */
17482FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
17483{
17484 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
17485 IEM_MC_BEGIN(3, 2);
17486 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17487 IEM_MC_LOCAL(uint16_t, u16Fsw);
17488 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17489 IEM_MC_ARG(int64_t *, pi64Dst, 1);
17490 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17491
17492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17494 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17495 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17496
17497 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17498 IEM_MC_PREPARE_FPU_USAGE();
17499 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17500 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
17501 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
17502 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17503 IEM_MC_ELSE()
17504 IEM_MC_IF_FCW_IM()
17505 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
17506 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
17507 IEM_MC_ENDIF();
17508 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17509 IEM_MC_ENDIF();
17510 IEM_MC_ADVANCE_RIP();
17511
17512 IEM_MC_END();
17513 return VINF_SUCCESS;
17514}
17515
17516
17517/** Opcode 0xdf. */
17518FNIEMOP_DEF(iemOp_EscF7)
17519{
17520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
17521 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17522 {
17523 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17524 {
17525 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
17526 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on Intel. */
17527 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on Intel. */
17528 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on Intel. */
17529 case 4: if (bRm == 0xe0)
17530 return FNIEMOP_CALL(iemOp_fnstsw_ax);
17531 return IEMOP_RAISE_INVALID_OPCODE();
17532 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
17533 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
17534 case 7: return IEMOP_RAISE_INVALID_OPCODE();
17535 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17536 }
17537 }
17538 else
17539 {
17540 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17541 {
17542 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
17543 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
17544 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
17545 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
17546 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
17547 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
17548 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
17549 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
17550 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17551 }
17552 }
17553}
17554
17555
17556/** Opcode 0xe0. */
17557FNIEMOP_DEF(iemOp_loopne_Jb)
17558{
17559 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
17560 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17562 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17563
17564 switch (pVCpu->iem.s.enmEffAddrMode)
17565 {
17566 case IEMMODE_16BIT:
17567 IEM_MC_BEGIN(0,0);
17568 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17569 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17570 IEM_MC_REL_JMP_S8(i8Imm);
17571 } IEM_MC_ELSE() {
17572 IEM_MC_ADVANCE_RIP();
17573 } IEM_MC_ENDIF();
17574 IEM_MC_END();
17575 return VINF_SUCCESS;
17576
17577 case IEMMODE_32BIT:
17578 IEM_MC_BEGIN(0,0);
17579 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17580 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17581 IEM_MC_REL_JMP_S8(i8Imm);
17582 } IEM_MC_ELSE() {
17583 IEM_MC_ADVANCE_RIP();
17584 } IEM_MC_ENDIF();
17585 IEM_MC_END();
17586 return VINF_SUCCESS;
17587
17588 case IEMMODE_64BIT:
17589 IEM_MC_BEGIN(0,0);
17590 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17591 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17592 IEM_MC_REL_JMP_S8(i8Imm);
17593 } IEM_MC_ELSE() {
17594 IEM_MC_ADVANCE_RIP();
17595 } IEM_MC_ENDIF();
17596 IEM_MC_END();
17597 return VINF_SUCCESS;
17598
17599 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17600 }
17601}
17602
17603
17604/** Opcode 0xe1. */
17605FNIEMOP_DEF(iemOp_loope_Jb)
17606{
17607 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
17608 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17610 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17611
17612 switch (pVCpu->iem.s.enmEffAddrMode)
17613 {
17614 case IEMMODE_16BIT:
17615 IEM_MC_BEGIN(0,0);
17616 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17617 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17618 IEM_MC_REL_JMP_S8(i8Imm);
17619 } IEM_MC_ELSE() {
17620 IEM_MC_ADVANCE_RIP();
17621 } IEM_MC_ENDIF();
17622 IEM_MC_END();
17623 return VINF_SUCCESS;
17624
17625 case IEMMODE_32BIT:
17626 IEM_MC_BEGIN(0,0);
17627 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17628 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17629 IEM_MC_REL_JMP_S8(i8Imm);
17630 } IEM_MC_ELSE() {
17631 IEM_MC_ADVANCE_RIP();
17632 } IEM_MC_ENDIF();
17633 IEM_MC_END();
17634 return VINF_SUCCESS;
17635
17636 case IEMMODE_64BIT:
17637 IEM_MC_BEGIN(0,0);
17638 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17639 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17640 IEM_MC_REL_JMP_S8(i8Imm);
17641 } IEM_MC_ELSE() {
17642 IEM_MC_ADVANCE_RIP();
17643 } IEM_MC_ENDIF();
17644 IEM_MC_END();
17645 return VINF_SUCCESS;
17646
17647 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17648 }
17649}
17650
17651
17652/** Opcode 0xe2. */
17653FNIEMOP_DEF(iemOp_loop_Jb)
17654{
17655 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
17656 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17658 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17659
17660 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
17661 * using the 32-bit operand size override. How can that be restarted? See
17662 * weird pseudo code in the Intel manual. */
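    /* The i8Imm == -(instruction length) checks below catch 'loop $', i.e. a
       branch to the instruction itself; rather than looping we simply zero
       the counter register and move on. */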
17663 switch (pVCpu->iem.s.enmEffAddrMode)
17664 {
17665 case IEMMODE_16BIT:
17666 IEM_MC_BEGIN(0,0);
17667 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17668 {
17669 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17670 IEM_MC_IF_CX_IS_NZ() {
17671 IEM_MC_REL_JMP_S8(i8Imm);
17672 } IEM_MC_ELSE() {
17673 IEM_MC_ADVANCE_RIP();
17674 } IEM_MC_ENDIF();
17675 }
17676 else
17677 {
17678 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
17679 IEM_MC_ADVANCE_RIP();
17680 }
17681 IEM_MC_END();
17682 return VINF_SUCCESS;
17683
17684 case IEMMODE_32BIT:
17685 IEM_MC_BEGIN(0,0);
17686 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17687 {
17688 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17689 IEM_MC_IF_ECX_IS_NZ() {
17690 IEM_MC_REL_JMP_S8(i8Imm);
17691 } IEM_MC_ELSE() {
17692 IEM_MC_ADVANCE_RIP();
17693 } IEM_MC_ENDIF();
17694 }
17695 else
17696 {
17697 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
17698 IEM_MC_ADVANCE_RIP();
17699 }
17700 IEM_MC_END();
17701 return VINF_SUCCESS;
17702
17703 case IEMMODE_64BIT:
17704 IEM_MC_BEGIN(0,0);
17705 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17706 {
17707 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17708 IEM_MC_IF_RCX_IS_NZ() {
17709 IEM_MC_REL_JMP_S8(i8Imm);
17710 } IEM_MC_ELSE() {
17711 IEM_MC_ADVANCE_RIP();
17712 } IEM_MC_ENDIF();
17713 }
17714 else
17715 {
17716 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
17717 IEM_MC_ADVANCE_RIP();
17718 }
17719 IEM_MC_END();
17720 return VINF_SUCCESS;
17721
17722 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17723 }
17724}
17725
17726
17727/** Opcode 0xe3. */
17728FNIEMOP_DEF(iemOp_jecxz_Jb)
17729{
17730 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
17731 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17733 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17734
17735 switch (pVCpu->iem.s.enmEffAddrMode)
17736 {
17737 case IEMMODE_16BIT:
17738 IEM_MC_BEGIN(0,0);
17739 IEM_MC_IF_CX_IS_NZ() {
17740 IEM_MC_ADVANCE_RIP();
17741 } IEM_MC_ELSE() {
17742 IEM_MC_REL_JMP_S8(i8Imm);
17743 } IEM_MC_ENDIF();
17744 IEM_MC_END();
17745 return VINF_SUCCESS;
17746
17747 case IEMMODE_32BIT:
17748 IEM_MC_BEGIN(0,0);
17749 IEM_MC_IF_ECX_IS_NZ() {
17750 IEM_MC_ADVANCE_RIP();
17751 } IEM_MC_ELSE() {
17752 IEM_MC_REL_JMP_S8(i8Imm);
17753 } IEM_MC_ENDIF();
17754 IEM_MC_END();
17755 return VINF_SUCCESS;
17756
17757 case IEMMODE_64BIT:
17758 IEM_MC_BEGIN(0,0);
17759 IEM_MC_IF_RCX_IS_NZ() {
17760 IEM_MC_ADVANCE_RIP();
17761 } IEM_MC_ELSE() {
17762 IEM_MC_REL_JMP_S8(i8Imm);
17763 } IEM_MC_ENDIF();
17764 IEM_MC_END();
17765 return VINF_SUCCESS;
17766
17767 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17768 }
17769}
17770
17771
17772/** Opcode 0xe4 */
17773FNIEMOP_DEF(iemOp_in_AL_Ib)
17774{
17775 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
17776 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17778 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
17779}
17780
17781
17782/** Opcode 0xe5 */
17783FNIEMOP_DEF(iemOp_in_eAX_Ib)
17784{
17785 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
17786 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17788 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17789}
17790
17791
17792/** Opcode 0xe6 */
17793FNIEMOP_DEF(iemOp_out_Ib_AL)
17794{
17795 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
17796 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17798 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
17799}
17800
17801
17802/** Opcode 0xe7 */
17803FNIEMOP_DEF(iemOp_out_Ib_eAX)
17804{
17805 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
17806 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17808 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17809}
17810
17811
17812/** Opcode 0xe8. */
17813FNIEMOP_DEF(iemOp_call_Jv)
17814{
17815 IEMOP_MNEMONIC(call_Jv, "call Jv");
17816 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17817 switch (pVCpu->iem.s.enmEffOpSize)
17818 {
17819 case IEMMODE_16BIT:
17820 {
17821 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17822 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
17823 }
17824
17825 case IEMMODE_32BIT:
17826 {
17827 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17828 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
17829 }
17830
17831 case IEMMODE_64BIT:
17832 {
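            /* In 64-bit mode the displacement is a 32-bit immediate,
               sign-extended to 64 bits (hence the S32_SX_U64 fetch). */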
17833 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17834 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
17835 }
17836
17837 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17838 }
17839}
17840
17841
17842/** Opcode 0xe9. */
17843FNIEMOP_DEF(iemOp_jmp_Jv)
17844{
17845 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
17846 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17847 switch (pVCpu->iem.s.enmEffOpSize)
17848 {
17849 case IEMMODE_16BIT:
17850 {
17851 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
17852 IEM_MC_BEGIN(0, 0);
17853 IEM_MC_REL_JMP_S16(i16Imm);
17854 IEM_MC_END();
17855 return VINF_SUCCESS;
17856 }
17857
17858 case IEMMODE_64BIT:
17859 case IEMMODE_32BIT:
17860 {
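            /* 64-bit mode reuses the 32-bit path: the displacement stays a
               32-bit immediate and is sign-extended when added to RIP. */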
17861 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
17862 IEM_MC_BEGIN(0, 0);
17863 IEM_MC_REL_JMP_S32(i32Imm);
17864 IEM_MC_END();
17865 return VINF_SUCCESS;
17866 }
17867
17868 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17869 }
17870}
17871
17872
17873/** Opcode 0xea. */
17874FNIEMOP_DEF(iemOp_jmp_Ap)
17875{
17876 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
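    /* The direct far jump form (opcode 0xea) is invalid in 64-bit mode and
       raises #UD; IEMOP_HLP_NO_64BIT() below enforces that. */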
17877 IEMOP_HLP_NO_64BIT();
17878
17879 /* Decode the far pointer address and pass it on to the far call C implementation. */
17880 uint32_t offSeg;
17881 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
17882 IEM_OPCODE_GET_NEXT_U32(&offSeg);
17883 else
17884 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
17885 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
17886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17887 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
17888}
17889
17890
17891/** Opcode 0xeb. */
17892FNIEMOP_DEF(iemOp_jmp_Jb)
17893{
17894 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
17895 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17897 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17898
17899 IEM_MC_BEGIN(0, 0);
17900 IEM_MC_REL_JMP_S8(i8Imm);
17901 IEM_MC_END();
17902 return VINF_SUCCESS;
17903}
17904
17905
17906/** Opcode 0xec */
17907FNIEMOP_DEF(iemOp_in_AL_DX)
17908{
17909 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
17910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17911 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
17912}
17913
17914
17915/** Opcode 0xed */
17916FNIEMOP_DEF(iemOp_eAX_DX)
17917{
17918 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
17919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17920 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17921}
17922
17923
17924/** Opcode 0xee */
17925FNIEMOP_DEF(iemOp_out_DX_AL)
17926{
17927 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
17928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17929 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
17930}
17931
17932
17933/** Opcode 0xef */
17934FNIEMOP_DEF(iemOp_out_DX_eAX)
17935{
17936 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
17937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17938 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17939}
17940
17941
17942/** Opcode 0xf0. */
17943FNIEMOP_DEF(iemOp_lock)
17944{
17945 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
17946 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
17947
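    /* Whether LOCK is actually valid here is checked by the instruction that
       follows; see the IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() invocations. */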
17948 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17949 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17950}
17951
17952
17953/** Opcode 0xf1. */
17954FNIEMOP_DEF(iemOp_int_1)
17955{
17956 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
17957 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
17958 /** @todo testcase! */
17959 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
17960}
17961
17962
17963/** Opcode 0xf2. */
17964FNIEMOP_DEF(iemOp_repne)
17965{
17966 /* This overrides any previous REPE prefix. */
17967 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
17968 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
17969 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
17970
17971 /* For the 4 entry opcode tables, REPNZ overrides any previous
17972 REPZ and operand size prefixes. */
17973 pVCpu->iem.s.idxPrefix = 3;
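    /* (Presumed index layout of those tables: 0 = no prefix, 1 = 0x66,
       2 = 0xf3/REPZ, 3 = 0xf2/REPNZ.) */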
17974
17975 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17976 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17977}
17978
17979
17980/** Opcode 0xf3. */
17981FNIEMOP_DEF(iemOp_repe)
17982{
17983 /* This overrides any previous REPNE prefix. */
17984 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
17985 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
17986 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
17987
17988 /* For the 4 entry opcode tables, REPZ overrides any previous
17989 REPNZ and operand size prefixes. */
17990 pVCpu->iem.s.idxPrefix = 2;
17991
17992 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17993 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17994}
17995
17996
17997/** Opcode 0xf4. */
17998FNIEMOP_DEF(iemOp_hlt)
17999{
18000 IEMOP_MNEMONIC(hlt, "hlt");
 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18001 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
18002}
18003
18004
18005/** Opcode 0xf5. */
18006FNIEMOP_DEF(iemOp_cmc)
18007{
18008 IEMOP_MNEMONIC(cmc, "cmc");
18009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18010 IEM_MC_BEGIN(0, 0);
18011 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
18012 IEM_MC_ADVANCE_RIP();
18013 IEM_MC_END();
18014 return VINF_SUCCESS;
18015}
18016
18017
18018/**
18019 * Common implementation of 'inc/dec/not/neg Eb'.
18020 *
18021 * @param bRm The RM byte.
18022 * @param pImpl The instruction implementation.
18023 */
18024FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
18025{
18026 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18027 {
18028 /* register access */
18029 IEM_MC_BEGIN(2, 0);
18030 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
18031 IEM_MC_ARG(uint32_t *, pEFlags, 1);
18032 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18033 IEM_MC_REF_EFLAGS(pEFlags);
18034 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
18035 IEM_MC_ADVANCE_RIP();
18036 IEM_MC_END();
18037 }
18038 else
18039 {
18040 /* memory access. */
18041 IEM_MC_BEGIN(2, 2);
18042 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
18043 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
18044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18045
18046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18047 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18048 IEM_MC_FETCH_EFLAGS(EFlags);
18049 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
18050 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
18051 else
18052 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
18053
18054 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
18055 IEM_MC_COMMIT_EFLAGS(EFlags);
18056 IEM_MC_ADVANCE_RIP();
18057 IEM_MC_END();
18058 }
18059 return VINF_SUCCESS;
18060}
18061
18062
18063/**
18064 * Common implementation of 'inc/dec/not/neg Ev'.
18065 *
18066 * @param bRm The RM byte.
18067 * @param pImpl The instruction implementation.
18068 */
18069FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
18070{
18071 /* Registers are handled by a common worker. */
18072 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18073 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18074
18075 /* Memory we do here. */
18076 switch (pVCpu->iem.s.enmEffOpSize)
18077 {
18078 case IEMMODE_16BIT:
18079 IEM_MC_BEGIN(2, 2);
18080 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
18081 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
18082 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18083
18084 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18085 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18086 IEM_MC_FETCH_EFLAGS(EFlags);
18087 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
18088 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
18089 else
18090 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
18091
18092 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
18093 IEM_MC_COMMIT_EFLAGS(EFlags);
18094 IEM_MC_ADVANCE_RIP();
18095 IEM_MC_END();
18096 return VINF_SUCCESS;
18097
18098 case IEMMODE_32BIT:
18099 IEM_MC_BEGIN(2, 2);
18100 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18101 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
18102 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18103
18104 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18105 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18106 IEM_MC_FETCH_EFLAGS(EFlags);
18107 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
18108 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
18109 else
18110 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
18111
18112 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
18113 IEM_MC_COMMIT_EFLAGS(EFlags);
18114 IEM_MC_ADVANCE_RIP();
18115 IEM_MC_END();
18116 return VINF_SUCCESS;
18117
18118 case IEMMODE_64BIT:
18119 IEM_MC_BEGIN(2, 2);
18120 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18121 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
18122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18123
18124 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18125 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18126 IEM_MC_FETCH_EFLAGS(EFlags);
18127 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
18128 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
18129 else
18130 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
18131
18132 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
18133 IEM_MC_COMMIT_EFLAGS(EFlags);
18134 IEM_MC_ADVANCE_RIP();
18135 IEM_MC_END();
18136 return VINF_SUCCESS;
18137
18138 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18139 }
18140}
18141
18142
18143/** Opcode 0xf6 /0. */
18144FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
18145{
18146 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
18147 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
18148
18149 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18150 {
18151 /* register access */
18152 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
18153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18154
18155 IEM_MC_BEGIN(3, 0);
18156 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
18157 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
18158 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18159 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18160 IEM_MC_REF_EFLAGS(pEFlags);
18161 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
18162 IEM_MC_ADVANCE_RIP();
18163 IEM_MC_END();
18164 }
18165 else
18166 {
18167 /* memory access. */
18168 IEM_MC_BEGIN(3, 2);
18169 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
18170 IEM_MC_ARG(uint8_t, u8Src, 1);
18171 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18172 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18173
18174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
18175 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
18176 IEM_MC_ASSIGN(u8Src, u8Imm);
18177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
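    /* TEST only reads its memory operand, so map it read-only; the commit
       below then effectively just unmaps it. */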
18178 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18179 IEM_MC_FETCH_EFLAGS(EFlags);
18180 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
18181
18182 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
18183 IEM_MC_COMMIT_EFLAGS(EFlags);
18184 IEM_MC_ADVANCE_RIP();
18185 IEM_MC_END();
18186 }
18187 return VINF_SUCCESS;
18188}
18189
18190
18191/** Opcode 0xf7 /0. */
18192FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
18193{
18194 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
18195 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
18196
18197 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18198 {
18199 /* register access */
18200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18201 switch (pVCpu->iem.s.enmEffOpSize)
18202 {
18203 case IEMMODE_16BIT:
18204 {
18205 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
18206 IEM_MC_BEGIN(3, 0);
18207 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
18208 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
18209 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18210 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18211 IEM_MC_REF_EFLAGS(pEFlags);
18212 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
18213 IEM_MC_ADVANCE_RIP();
18214 IEM_MC_END();
18215 return VINF_SUCCESS;
18216 }
18217
18218 case IEMMODE_32BIT:
18219 {
18220 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
18221 IEM_MC_BEGIN(3, 0);
18222 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18223 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
18224 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18225 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18226 IEM_MC_REF_EFLAGS(pEFlags);
18227 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
18228 /* No clearing the high dword here - test doesn't write back the result. */
18229 IEM_MC_ADVANCE_RIP();
18230 IEM_MC_END();
18231 return VINF_SUCCESS;
18232 }
18233
18234 case IEMMODE_64BIT:
18235 {
18236 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
18237 IEM_MC_BEGIN(3, 0);
18238 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18239 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
18240 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18241 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18242 IEM_MC_REF_EFLAGS(pEFlags);
18243 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
18244 IEM_MC_ADVANCE_RIP();
18245 IEM_MC_END();
18246 return VINF_SUCCESS;
18247 }
18248
18249 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18250 }
18251 }
18252 else
18253 {
18254 /* memory access. */
18255 switch (pVCpu->iem.s.enmEffOpSize)
18256 {
18257 case IEMMODE_16BIT:
18258 {
18259 IEM_MC_BEGIN(3, 2);
18260 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
18261 IEM_MC_ARG(uint16_t, u16Src, 1);
18262 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18264
18265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
18266 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
18267 IEM_MC_ASSIGN(u16Src, u16Imm);
18268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18269 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18270 IEM_MC_FETCH_EFLAGS(EFlags);
18271 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
18272
18273 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
18274 IEM_MC_COMMIT_EFLAGS(EFlags);
18275 IEM_MC_ADVANCE_RIP();
18276 IEM_MC_END();
18277 return VINF_SUCCESS;
18278 }
18279
18280 case IEMMODE_32BIT:
18281 {
18282 IEM_MC_BEGIN(3, 2);
18283 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18284 IEM_MC_ARG(uint32_t, u32Src, 1);
18285 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18287
18288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18289 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
18290 IEM_MC_ASSIGN(u32Src, u32Imm);
18291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18292 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18293 IEM_MC_FETCH_EFLAGS(EFlags);
18294 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
18295
18296 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
18297 IEM_MC_COMMIT_EFLAGS(EFlags);
18298 IEM_MC_ADVANCE_RIP();
18299 IEM_MC_END();
18300 return VINF_SUCCESS;
18301 }
18302
18303 case IEMMODE_64BIT:
18304 {
18305 IEM_MC_BEGIN(3, 2);
18306 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18307 IEM_MC_ARG(uint64_t, u64Src, 1);
18308 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18310
18311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18312 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
18313 IEM_MC_ASSIGN(u64Src, u64Imm);
18314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18315 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18316 IEM_MC_FETCH_EFLAGS(EFlags);
18317 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
18318
18319 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
18320 IEM_MC_COMMIT_EFLAGS(EFlags);
18321 IEM_MC_ADVANCE_RIP();
18322 IEM_MC_END();
18323 return VINF_SUCCESS;
18324 }
18325
18326 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18327 }
18328 }
18329}
18330
18331
18332/** Opcode 0xf6 /4, /5, /6 and /7. */
18333FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
18334{
18335 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18336 {
18337 /* register access */
18338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18339 IEM_MC_BEGIN(3, 1);
18340 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18341 IEM_MC_ARG(uint8_t, u8Value, 1);
18342 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18343 IEM_MC_LOCAL(int32_t, rc);
18344
18345 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18346 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18347 IEM_MC_REF_EFLAGS(pEFlags);
18348 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
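    /* The assembly helper returns zero on success and non-zero when #DE must
       be raised (divide by zero or quotient overflow). */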
18349 IEM_MC_IF_LOCAL_IS_Z(rc) {
18350 IEM_MC_ADVANCE_RIP();
18351 } IEM_MC_ELSE() {
18352 IEM_MC_RAISE_DIVIDE_ERROR();
18353 } IEM_MC_ENDIF();
18354
18355 IEM_MC_END();
18356 }
18357 else
18358 {
18359 /* memory access. */
18360 IEM_MC_BEGIN(3, 2);
18361 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18362 IEM_MC_ARG(uint8_t, u8Value, 1);
18363 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18364 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18365 IEM_MC_LOCAL(int32_t, rc);
18366
18367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18369 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18370 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18371 IEM_MC_REF_EFLAGS(pEFlags);
18372 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
18373 IEM_MC_IF_LOCAL_IS_Z(rc) {
18374 IEM_MC_ADVANCE_RIP();
18375 } IEM_MC_ELSE() {
18376 IEM_MC_RAISE_DIVIDE_ERROR();
18377 } IEM_MC_ENDIF();
18378
18379 IEM_MC_END();
18380 }
18381 return VINF_SUCCESS;
18382}
18383
18384
18385/** Opcode 0xf7 /4, /5, /6 and /7. */
18386FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
18387{
18388 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18389
18390 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18391 {
18392 /* register access */
18393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18394 switch (pVCpu->iem.s.enmEffOpSize)
18395 {
18396 case IEMMODE_16BIT:
18397 {
18399 IEM_MC_BEGIN(4, 1);
18400 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18401 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18402 IEM_MC_ARG(uint16_t, u16Value, 2);
18403 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18404 IEM_MC_LOCAL(int32_t, rc);
18405
18406 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18407 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18408 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18409 IEM_MC_REF_EFLAGS(pEFlags);
18410 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18411 IEM_MC_IF_LOCAL_IS_Z(rc) {
18412 IEM_MC_ADVANCE_RIP();
18413 } IEM_MC_ELSE() {
18414 IEM_MC_RAISE_DIVIDE_ERROR();
18415 } IEM_MC_ENDIF();
18416
18417 IEM_MC_END();
18418 return VINF_SUCCESS;
18419 }
18420
18421 case IEMMODE_32BIT:
18422 {
18424 IEM_MC_BEGIN(4, 1);
18425 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18426 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18427 IEM_MC_ARG(uint32_t, u32Value, 2);
18428 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18429 IEM_MC_LOCAL(int32_t, rc);
18430
18431 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18432 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18433 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18434 IEM_MC_REF_EFLAGS(pEFlags);
18435 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18436 IEM_MC_IF_LOCAL_IS_Z(rc) {
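        /* Success: clear the upper halves explicitly, mirroring how any
           32-bit register write zeroes bits 63:32. */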
18437 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18438 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18439 IEM_MC_ADVANCE_RIP();
18440 } IEM_MC_ELSE() {
18441 IEM_MC_RAISE_DIVIDE_ERROR();
18442 } IEM_MC_ENDIF();
18443
18444 IEM_MC_END();
18445 return VINF_SUCCESS;
18446 }
18447
18448 case IEMMODE_64BIT:
18449 {
18451 IEM_MC_BEGIN(4, 1);
18452 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18453 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18454 IEM_MC_ARG(uint64_t, u64Value, 2);
18455 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18456 IEM_MC_LOCAL(int32_t, rc);
18457
18458 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18459 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18460 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18461 IEM_MC_REF_EFLAGS(pEFlags);
18462 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18463 IEM_MC_IF_LOCAL_IS_Z(rc) {
18464 IEM_MC_ADVANCE_RIP();
18465 } IEM_MC_ELSE() {
18466 IEM_MC_RAISE_DIVIDE_ERROR();
18467 } IEM_MC_ENDIF();
18468
18469 IEM_MC_END();
18470 return VINF_SUCCESS;
18471 }
18472
18473 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18474 }
18475 }
18476 else
18477 {
18478 /* memory access. */
18479 switch (pVCpu->iem.s.enmEffOpSize)
18480 {
18481 case IEMMODE_16BIT:
18482 {
18483 IEM_MC_BEGIN(4, 2);
18484 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18485 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18486 IEM_MC_ARG(uint16_t, u16Value, 2);
18487 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18489 IEM_MC_LOCAL(int32_t, rc);
18490
18491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18493 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18494 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18495 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18496 IEM_MC_REF_EFLAGS(pEFlags);
18497 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18498 IEM_MC_IF_LOCAL_IS_Z(rc) {
18499 IEM_MC_ADVANCE_RIP();
18500 } IEM_MC_ELSE() {
18501 IEM_MC_RAISE_DIVIDE_ERROR();
18502 } IEM_MC_ENDIF();
18503
18504 IEM_MC_END();
18505 return VINF_SUCCESS;
18506 }
18507
18508 case IEMMODE_32BIT:
18509 {
18510 IEM_MC_BEGIN(4, 2);
18511 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18512 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18513 IEM_MC_ARG(uint32_t, u32Value, 2);
18514 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18516 IEM_MC_LOCAL(int32_t, rc);
18517
18518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18520 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18521 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18522 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18523 IEM_MC_REF_EFLAGS(pEFlags);
18524 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18525 IEM_MC_IF_LOCAL_IS_Z(rc) {
18526 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18527 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18528 IEM_MC_ADVANCE_RIP();
18529 } IEM_MC_ELSE() {
18530 IEM_MC_RAISE_DIVIDE_ERROR();
18531 } IEM_MC_ENDIF();
18532
18533 IEM_MC_END();
18534 return VINF_SUCCESS;
18535 }
18536
18537 case IEMMODE_64BIT:
18538 {
18539 IEM_MC_BEGIN(4, 2);
18540 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18541 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18542 IEM_MC_ARG(uint64_t, u64Value, 2);
18543 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18544 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18545 IEM_MC_LOCAL(int32_t, rc);
18546
18547 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18549 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18550 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18551 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18552 IEM_MC_REF_EFLAGS(pEFlags);
18553 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18554 IEM_MC_IF_LOCAL_IS_Z(rc) {
18555 IEM_MC_ADVANCE_RIP();
18556 } IEM_MC_ELSE() {
18557 IEM_MC_RAISE_DIVIDE_ERROR();
18558 } IEM_MC_ENDIF();
18559
18560 IEM_MC_END();
18561 return VINF_SUCCESS;
18562 }
18563
18564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18565 }
18566 }
18567}
18568
/** Opcode 0xf6. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

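/*
 * Example 0xf6 encodings for reference (ModRM.reg selects the group-3
 * operation; the bytes are illustrative only):
 *      F6 F3       div bl      ; mod=11, reg=110, rm=011: AX/BL -> AL, remainder AH
 *      F6 D0       not al      ; mod=11, reg=010, rm=000
 * E.g. with AX=0101h (257) and BL=02h, 'div bl' leaves AL=80h (128) and
 * AH=01h.
 */
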
/** Opcode 0xf7. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0xf8. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xf9. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xfa. */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}

/** Opcode 0xfb. */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}

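/*
 * Remark: unlike CLC/STC above, CLI and STI cannot be done as plain flag
 * updates.  They have to check IOPL against CPL and the VME/PVI bits in
 * V8086/protected mode, and STI additionally delays interrupts for one
 * instruction (the interrupt shadow), which is why both defer to C
 * implementations via IEM_MC_DEFER_TO_CIMPL_0 instead of using an
 * IEM_MC_BEGIN/IEM_MC_END block.
 */
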
/** Opcode 0xfc. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xfd. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xfe. */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}

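/*
 * Example 0xfe encodings (only /0 and /1 are defined, everything else is
 * #UD per the default case above; bytes are illustrative):
 *      FE C0       inc al          ; mod=11, reg=000, rm=000
 *      FE 0F       dec byte [bx]   ; 16-bit addressing: mod=00, reg=001, rm=111
 */
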
/**
 * Opcode 0xff /2.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}

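/*
 * Example 0xff /2 encodings for the near call above (illustrative bytes):
 *      FF D0       call rax        ; 64-bit mode: mod=11, reg=010, rm=000
 *      FF 55 08    call qword [rbp+8]
 * Pushing the return address and checking the target (canonical address /
 * code segment limit) is left to iemCImpl_call_16/32/64.
 */
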
typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);

FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
{
    /* Registers? How?? */
    if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
    { /* likely */ }
    else
        return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */

    /* Far pointer loaded from memory. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
             *        and will apparently ignore REX.W, at least for the jmp far qword [rsp]
             *        and call far qword [rsp] encodings. */
            if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Sel, 0);
                IEM_MC_ARG(uint64_t, offSeg, 1);
                IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
                IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
                IEM_MC_END();
                return VINF_SUCCESS;
            }
            /* AMD falls thru. */
            /* fall thru */

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

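/*
 * Memory layout of the far pointer fetched by iemOpHlp_Grp5_far_Ep above:
 * the offset comes first and the selector last, as the displacements in
 * the IEM_MC_FETCH_MEM_U16_DISP calls show:
 *      m16:16      [GCPtrEffSrc+0] = offset16,  [GCPtrEffSrc+2] = selector
 *      m16:32      [GCPtrEffSrc+0] = offset32,  [GCPtrEffSrc+4] = selector
 *      m16:64      [GCPtrEffSrc+0] = offset64,  [GCPtrEffSrc+8] = selector
 */
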
/**
 * Opcode 0xff /3.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}


/**
 * Opcode 0xff /4.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/**
 * Opcode 0xff /5.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}


/**
 * Opcode 0xff /6.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory operands are handled here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

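/*
 * Remark: push Ev uses IEMOP_HLP_DEFAULT_64BIT_OP_SIZE because the default
 * operand size of a push is 64 bits in 64-bit mode; a 66h prefix still
 * gives a 16-bit push, but there is no encoding for a 32-bit push there.
 */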

/** Opcode 0xff. */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}


const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};

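/*
 * Illustrative only: a minimal sketch of how a dispatcher could use the
 * table above -- fetch one opcode byte and hand it to the matching
 * handler.  This is not the actual IEM decoder loop (that lives in
 * IEMAll.cpp); the function name is made up for the example.
 */
#if 0
FNIEMOP_DEF(iemOpExample_DispatchOneByte)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b); /* the macro returns on fetch failure */
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
#endif
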
/** @} */

#ifdef _MSC_VER
# pragma warning(pop)
#endif